Nov 26 11:10:39 crc systemd[1]: Starting Kubernetes Kubelet...
Nov 26 11:10:39 crc restorecon[4621]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 26 11:10:39 crc restorecon[4621]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:39 crc restorecon[4621]: 
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:39 crc restorecon[4621]:
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc 
restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc 
restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 26 11:10:39 
crc restorecon[4621]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 
11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 26 11:10:39 crc restorecon[4621]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:39 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 
11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 26 11:10:40 crc restorecon[4621]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 26 11:10:40 crc restorecon[4621]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 26 11:10:40 crc restorecon[4621]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Nov 26 11:10:40 crc kubenswrapper[4622]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Nov 26 11:10:40 crc kubenswrapper[4622]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Nov 26 11:10:40 crc kubenswrapper[4622]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Nov 26 11:10:40 crc kubenswrapper[4622]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Nov 26 11:10:40 crc kubenswrapper[4622]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Nov 26 11:10:40 crc kubenswrapper[4622]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.583459 4622 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587413 4622 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587434 4622 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587440 4622 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587446 4622 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587452 4622 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587457 4622 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587461 4622 feature_gate.go:330] unrecognized feature gate: PinnedImages
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587466 4622 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587470 4622 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587474 4622 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587478 4622 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587482 4622 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587486 4622 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587490 4622 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587494 4622 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587529 4622 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587534 4622 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587539 4622 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587543 4622 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587547 4622 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587552 4622 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587558 4622 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587563 4622 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587568 4622 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587573 4622 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587577 4622 feature_gate.go:330] unrecognized feature gate: OVNObservability
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587581 4622 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587587 4622 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587592 4622 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587597 4622 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587601 4622 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587605 4622 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587611 4622 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587616 4622 feature_gate.go:330] unrecognized feature gate: Example
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587621 4622 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587627 4622 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587632 4622 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587637 4622 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587641 4622 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587645 4622 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587649 4622 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587653 4622 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587658 4622 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587663 4622 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587667 4622 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587671 4622 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587675 4622 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587679 4622 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587683 4622 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587687 4622 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587691 4622 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587695 4622 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587699 4622 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587703 4622 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587707 4622 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587711 4622 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587715 4622 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587720 4622 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587724 4622 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587729 4622 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587733 4622 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587737 4622 feature_gate.go:330] 
unrecognized feature gate: MixedCPUsAllocation Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587741 4622 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587754 4622 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587759 4622 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587763 4622 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587766 4622 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587770 4622 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587774 4622 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587778 4622 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.587782 4622 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587873 4622 flags.go:64] FLAG: --address="0.0.0.0" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587880 4622 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587891 4622 flags.go:64] FLAG: --anonymous-auth="true" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587910 4622 flags.go:64] FLAG: --application-metrics-count-limit="100" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587915 4622 flags.go:64] FLAG: --authentication-token-webhook="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587918 4622 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587923 4622 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587929 4622 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587935 4622 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587938 4622 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587942 4622 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587946 4622 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587949 4622 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587953 4622 flags.go:64] FLAG: --cgroup-root="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587956 4622 flags.go:64] FLAG: --cgroups-per-qos="true" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587960 4622 flags.go:64] FLAG: --client-ca-file="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587965 4622 flags.go:64] FLAG: --cloud-config="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587969 4622 flags.go:64] FLAG: --cloud-provider="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587972 4622 flags.go:64] FLAG: --cluster-dns="[]" 
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587977 4622 flags.go:64] FLAG: --cluster-domain="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587981 4622 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587985 4622 flags.go:64] FLAG: --config-dir="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587989 4622 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587993 4622 flags.go:64] FLAG: --container-log-max-files="5" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.587998 4622 flags.go:64] FLAG: --container-log-max-size="10Mi" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588002 4622 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588006 4622 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588010 4622 flags.go:64] FLAG: --containerd-namespace="k8s.io" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588014 4622 flags.go:64] FLAG: --contention-profiling="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588018 4622 flags.go:64] FLAG: --cpu-cfs-quota="true" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588021 4622 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588025 4622 flags.go:64] FLAG: --cpu-manager-policy="none" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588028 4622 flags.go:64] FLAG: --cpu-manager-policy-options="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588033 4622 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588038 4622 flags.go:64] FLAG: --enable-controller-attach-detach="true" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588042 4622 flags.go:64] FLAG: --enable-debugging-handlers="true" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588045 4622 flags.go:64] FLAG: --enable-load-reader="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588049 4622 flags.go:64] FLAG: --enable-server="true" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588053 4622 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588057 4622 flags.go:64] FLAG: --event-burst="100" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588061 4622 flags.go:64] FLAG: --event-qps="50" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588065 4622 flags.go:64] FLAG: --event-storage-age-limit="default=0" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588069 4622 flags.go:64] FLAG: --event-storage-event-limit="default=0" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588072 4622 flags.go:64] FLAG: --eviction-hard="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588076 4622 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588080 4622 flags.go:64] FLAG: --eviction-minimum-reclaim="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588083 4622 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588087 4622 flags.go:64] FLAG: --eviction-soft="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588091 4622 flags.go:64] FLAG: 
--eviction-soft-grace-period="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588095 4622 flags.go:64] FLAG: --exit-on-lock-contention="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588098 4622 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588102 4622 flags.go:64] FLAG: --experimental-mounter-path="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588106 4622 flags.go:64] FLAG: --fail-cgroupv1="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588109 4622 flags.go:64] FLAG: --fail-swap-on="true" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588113 4622 flags.go:64] FLAG: --feature-gates="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588117 4622 flags.go:64] FLAG: --file-check-frequency="20s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588121 4622 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588125 4622 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588129 4622 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588132 4622 flags.go:64] FLAG: --healthz-port="10248" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588136 4622 flags.go:64] FLAG: --help="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588140 4622 flags.go:64] FLAG: --hostname-override="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588143 4622 flags.go:64] FLAG: --housekeeping-interval="10s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588146 4622 flags.go:64] FLAG: --http-check-frequency="20s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588150 4622 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588154 4622 flags.go:64] FLAG: --image-credential-provider-config="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588158 4622 flags.go:64] FLAG: --image-gc-high-threshold="85" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588161 4622 flags.go:64] FLAG: --image-gc-low-threshold="80" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588165 4622 flags.go:64] FLAG: --image-service-endpoint="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588168 4622 flags.go:64] FLAG: --kernel-memcg-notification="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588172 4622 flags.go:64] FLAG: --kube-api-burst="100" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588175 4622 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588179 4622 flags.go:64] FLAG: --kube-api-qps="50" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588183 4622 flags.go:64] FLAG: --kube-reserved="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588186 4622 flags.go:64] FLAG: --kube-reserved-cgroup="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588190 4622 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588194 4622 flags.go:64] FLAG: --kubelet-cgroups="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588197 4622 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588201 4622 flags.go:64] FLAG: --lock-file="" 
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588204 4622 flags.go:64] FLAG: --log-cadvisor-usage="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588208 4622 flags.go:64] FLAG: --log-flush-frequency="5s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588212 4622 flags.go:64] FLAG: --log-json-info-buffer-size="0" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588217 4622 flags.go:64] FLAG: --log-json-split-stream="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588221 4622 flags.go:64] FLAG: --log-text-info-buffer-size="0" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588224 4622 flags.go:64] FLAG: --log-text-split-stream="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588228 4622 flags.go:64] FLAG: --logging-format="text" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588231 4622 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588235 4622 flags.go:64] FLAG: --make-iptables-util-chains="true" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588239 4622 flags.go:64] FLAG: --manifest-url="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588242 4622 flags.go:64] FLAG: --manifest-url-header="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588247 4622 flags.go:64] FLAG: --max-housekeeping-interval="15s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588251 4622 flags.go:64] FLAG: --max-open-files="1000000" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588255 4622 flags.go:64] FLAG: --max-pods="110" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588259 4622 flags.go:64] FLAG: --maximum-dead-containers="-1" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588263 4622 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588267 4622 flags.go:64] FLAG: --memory-manager-policy="None" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588270 4622 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588274 4622 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588278 4622 flags.go:64] FLAG: --node-ip="192.168.126.11" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588281 4622 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588290 4622 flags.go:64] FLAG: --node-status-max-images="50" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588294 4622 flags.go:64] FLAG: --node-status-update-frequency="10s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588297 4622 flags.go:64] FLAG: --oom-score-adj="-999" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588301 4622 flags.go:64] FLAG: --pod-cidr="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588305 4622 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588310 4622 flags.go:64] FLAG: --pod-manifest-path="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588313 4622 flags.go:64] FLAG: --pod-max-pids="-1" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588317 4622 
flags.go:64] FLAG: --pods-per-core="0" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588321 4622 flags.go:64] FLAG: --port="10250" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588325 4622 flags.go:64] FLAG: --protect-kernel-defaults="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588328 4622 flags.go:64] FLAG: --provider-id="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588332 4622 flags.go:64] FLAG: --qos-reserved="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588335 4622 flags.go:64] FLAG: --read-only-port="10255" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588339 4622 flags.go:64] FLAG: --register-node="true" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588343 4622 flags.go:64] FLAG: --register-schedulable="true" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588346 4622 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588353 4622 flags.go:64] FLAG: --registry-burst="10" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588357 4622 flags.go:64] FLAG: --registry-qps="5" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588360 4622 flags.go:64] FLAG: --reserved-cpus="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588364 4622 flags.go:64] FLAG: --reserved-memory="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588368 4622 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588372 4622 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588376 4622 flags.go:64] FLAG: --rotate-certificates="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588380 4622 flags.go:64] FLAG: --rotate-server-certificates="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588384 4622 flags.go:64] FLAG: --runonce="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588387 4622 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588391 4622 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588395 4622 flags.go:64] FLAG: --seccomp-default="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588398 4622 flags.go:64] FLAG: --serialize-image-pulls="true" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588402 4622 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588406 4622 flags.go:64] FLAG: --storage-driver-db="cadvisor" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588410 4622 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588413 4622 flags.go:64] FLAG: --storage-driver-password="root" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588417 4622 flags.go:64] FLAG: --storage-driver-secure="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588421 4622 flags.go:64] FLAG: --storage-driver-table="stats" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588424 4622 flags.go:64] FLAG: --storage-driver-user="root" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588428 4622 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588431 4622 flags.go:64] FLAG: --sync-frequency="1m0s" Nov 
26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588435 4622 flags.go:64] FLAG: --system-cgroups="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588438 4622 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588444 4622 flags.go:64] FLAG: --system-reserved-cgroup="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588447 4622 flags.go:64] FLAG: --tls-cert-file="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588451 4622 flags.go:64] FLAG: --tls-cipher-suites="[]" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588455 4622 flags.go:64] FLAG: --tls-min-version="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588459 4622 flags.go:64] FLAG: --tls-private-key-file="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588463 4622 flags.go:64] FLAG: --topology-manager-policy="none" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588466 4622 flags.go:64] FLAG: --topology-manager-policy-options="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588470 4622 flags.go:64] FLAG: --topology-manager-scope="container" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588473 4622 flags.go:64] FLAG: --v="2" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588478 4622 flags.go:64] FLAG: --version="false" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588483 4622 flags.go:64] FLAG: --vmodule="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588487 4622 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588491 4622 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588587 4622 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588593 4622 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588596 4622 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588600 4622 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588603 4622 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588606 4622 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588610 4622 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588614 4622 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588617 4622 feature_gate.go:330] unrecognized feature gate: Example Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588621 4622 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588624 4622 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588627 4622 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588630 4622 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 
26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588634 4622 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588637 4622 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588640 4622 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588643 4622 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588647 4622 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588650 4622 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588653 4622 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588656 4622 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588659 4622 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588663 4622 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588666 4622 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588669 4622 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588672 4622 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588675 4622 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588679 4622 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588682 4622 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588685 4622 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588688 4622 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588691 4622 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588694 4622 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588699 4622 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588702 4622 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588706 4622 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588709 4622 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588715 4622 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588719 4622 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588722 4622 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588725 4622 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588729 4622 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588732 4622 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588735 4622 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588738 4622 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588741 4622 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588755 4622 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588758 4622 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588762 4622 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588765 4622 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588768 4622 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588771 4622 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588776 4622 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588780 4622 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588784 4622 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588788 4622 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588792 4622 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588795 4622 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588799 4622 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588802 4622 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588805 4622 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588809 4622 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588813 4622 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588817 4622 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588821 4622 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588825 4622 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588828 4622 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588831 4622 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588834 4622 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588840 4622 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.588844 4622 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.588855 4622 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.596272 4622 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.596423 4622 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596478 4622 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596486 4622 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596490 4622 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596495 4622 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596513 4622 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596519 4622 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596523 4622 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596527 4622 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596531 4622 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596535 4622 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596538 4622 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596542 4622 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596546 4622 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596549 4622 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596553 4622 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596556 4622 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596560 4622 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596563 4622 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 
11:10:40.596566 4622 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596569 4622 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596572 4622 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596577 4622 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596580 4622 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596583 4622 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596587 4622 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596591 4622 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596594 4622 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596605 4622 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596609 4622 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596614 4622 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596617 4622 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596621 4622 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596624 4622 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596628 4622 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596631 4622 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596634 4622 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596638 4622 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596641 4622 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596644 4622 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596648 4622 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596651 4622 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596654 4622 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596658 4622 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596663 4622 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596666 4622 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596669 4622 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596672 4622 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596676 4622 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596679 4622 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596682 4622 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596685 4622 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596688 4622 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596691 4622 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596694 4622 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596697 4622 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596701 4622 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596707 4622 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596712 4622 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596716 4622 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596720 4622 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596724 4622 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596727 4622 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596731 4622 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596734 4622 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596737 4622 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596740 4622 feature_gate.go:330] unrecognized feature gate: Example Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596743 4622 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596757 4622 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596761 4622 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596764 4622 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596767 4622 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.596773 4622 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596868 4622 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596874 4622 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596877 4622 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596881 4622 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596884 4622 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596888 4622 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596891 4622 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596894 4622 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596898 4622 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion 
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596901 4622 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596904 4622 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596907 4622 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596910 4622 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596914 4622 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596918 4622 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596921 4622 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596924 4622 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596928 4622 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596932 4622 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596936 4622 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596940 4622 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596945 4622 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596949 4622 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596953 4622 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596956 4622 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596959 4622 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596963 4622 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596967 4622 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596970 4622 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596974 4622 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596977 4622 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596981 4622 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596984 4622 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596988 4622 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 26 11:10:40 crc 
kubenswrapper[4622]: W1126 11:10:40.596991 4622 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596994 4622 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.596997 4622 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597000 4622 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597004 4622 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597007 4622 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597010 4622 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597013 4622 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597016 4622 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597020 4622 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597023 4622 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597026 4622 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597029 4622 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597033 4622 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597036 4622 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597039 4622 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597043 4622 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597046 4622 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597051 4622 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597055 4622 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597058 4622 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597062 4622 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597065 4622 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597068 4622 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597072 4622 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597077 4622 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597081 4622 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597084 4622 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597088 4622 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597092 4622 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597096 4622 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597100 4622 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597104 4622 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597108 4622 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597112 4622 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597115 4622 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.597119 4622 feature_gate.go:330] unrecognized feature gate: Example Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.597124 4622 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.597578 4622 server.go:940] "Client rotation is on, will bootstrap in background" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.600551 4622 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.600622 4622 certificate_store.go:130] Loading cert/key pair from 
"/var/lib/kubelet/pki/kubelet-client-current.pem". Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.601572 4622 server.go:997] "Starting client certificate rotation" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.601600 4622 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.601780 4622 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-09 23:41:18.635968815 +0000 UTC Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.601852 4622 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1068h30m38.034119595s for next certificate rotation Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.615410 4622 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.616823 4622 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.625956 4622 log.go:25] "Validated CRI v1 runtime API" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.642536 4622 log.go:25] "Validated CRI v1 image API" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.643739 4622 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.647099 4622 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-11-26-11-07-01-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.647144 4622 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm:{mountpoint:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm major:0 minor:43 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:50 fsType:tmpfs blockSize:0} overlay_0-44:{mountpoint:/var/lib/containers/storage/overlay/94b752e0a51c0134b00ddef6dc7a933a9d7c1d9bdc88a18dae4192a0d557d623/merged major:0 minor:44 fsType:overlay blockSize:0}] Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.660865 4622 manager.go:217] Machine: {Timestamp:2025-11-26 11:10:40.659467891 +0000 UTC m=+0.250679433 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2445406 MemoryCapacity:33654116352 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:cec42615-21c9-4929-b427-d0d60aa981a6 BootID:c657949b-a8f1-4d3e-9908-1148dfba75d3 Filesystems:[{Device:/dev/shm DeviceMajor:0 
DeviceMinor:22 Capacity:16827056128 Type:vfs Inodes:4108168 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:overlay_0-44 DeviceMajor:0 DeviceMinor:44 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm DeviceMajor:0 DeviceMinor:43 Capacity:65536000 Type:vfs Inodes:4108168 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:50 Capacity:1073741824 Type:vfs Inodes:4108168 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:53:40:d9 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:enp3s0 MacAddress:fa:16:3e:53:40:d9 Speed:-1 Mtu:1500} {Name:enp7s0 MacAddress:fa:16:3e:aa:64:00 Speed:-1 Mtu:1440} {Name:enp7s0.20 MacAddress:52:54:00:c0:ba:a3 Speed:-1 Mtu:1436} {Name:enp7s0.21 MacAddress:52:54:00:fe:01:d4 Speed:-1 Mtu:1436} {Name:enp7s0.22 MacAddress:52:54:00:1b:1b:15 Speed:-1 Mtu:1436} {Name:enp7s0.23 MacAddress:52:54:00:a0:2a:75 Speed:-1 Mtu:1436} {Name:eth10 MacAddress:62:fe:fe:95:1b:d6 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:e6:50:ba:56:e1:a7 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654116352 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:65536 Type:Data Level:1} {Id:0 Size:65536 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:65536 Type:Data Level:1} {Id:1 Size:65536 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:65536 Type:Data Level:1} {Id:10 Size:65536 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:65536 Type:Data Level:1} {Id:11 Size:65536 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:65536 Type:Data Level:1} {Id:2 Size:65536 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:65536 Type:Data Level:1} {Id:3 Size:65536 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:65536 Type:Data Level:1} {Id:4 Size:65536 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified 
Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:65536 Type:Data Level:1} {Id:5 Size:65536 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:65536 Type:Data Level:1} {Id:6 Size:65536 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:65536 Type:Data Level:1} {Id:7 Size:65536 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:65536 Type:Data Level:1} {Id:8 Size:65536 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:65536 Type:Data Level:1} {Id:9 Size:65536 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.661038 4622 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.661131 4622 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.661421 4622 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.661632 4622 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.661672 4622 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" 
nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.661870 4622 topology_manager.go:138] "Creating topology manager with none policy" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.661881 4622 container_manager_linux.go:303] "Creating device plugin manager" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.662320 4622 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.662349 4622 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.662459 4622 state_mem.go:36] "Initialized new in-memory state store" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.662555 4622 server.go:1245] "Using root directory" path="/var/lib/kubelet" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.664201 4622 kubelet.go:418] "Attempting to sync node with API server" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.664221 4622 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.664235 4622 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.664244 4622 kubelet.go:324] "Adding apiserver pod source" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.664253 4622 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.666375 4622 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.667380 4622 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.668020 4622 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.26.254:6443: connect: connection refused
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.668013 4622 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.26.254:6443: connect: connection refused
Nov 26 11:10:40 crc kubenswrapper[4622]: E1126 11:10:40.668111 4622 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.26.254:6443: connect: connection refused" logger="UnhandledError"
Nov 26 11:10:40 crc kubenswrapper[4622]: E1126 11:10:40.668126 4622 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.26.254:6443: connect: connection refused" logger="UnhandledError"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.668703 4622 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.669714 4622 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.669733 4622 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.669740 4622 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.669746 4622 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.669767 4622 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.669773 4622 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.669780 4622 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.669791 4622 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.669798 4622 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.669806 4622 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.669814 4622 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.669820 4622 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.670217 4622 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.670565 4622 server.go:1280] "Started kubelet"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.670796 4622 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.26.254:6443: connect: connection refused
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.671197 4622 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.671208 4622 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.671552 4622 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Nov 26 11:10:40 crc systemd[1]: Started Kubernetes Kubelet.
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.672580 4622 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.673106 4622 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.673413 4622 volume_manager.go:287] "The desired_state_of_world populator starts"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.673431 4622 volume_manager.go:289] "Starting Kubelet Volume Manager"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.673130 4622 server.go:460] "Adding debug handlers to kubelet server"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.673529 4622 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.673126 4622 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 15:52:42.728181357 +0000 UTC
Nov 26 11:10:40 crc kubenswrapper[4622]: E1126 11:10:40.673776 4622 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.675580 4622 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.26.254:6443: connect: connection refused
Nov 26 11:10:40 crc kubenswrapper[4622]: E1126 11:10:40.675681 4622 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.26.254:6443: connect: connection refused" logger="UnhandledError"
Nov 26 11:10:40 crc kubenswrapper[4622]: E1126 11:10:40.676088 4622 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.254:6443: connect: connection refused" interval="200ms"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.676303 4622 factory.go:55] Registering systemd factory
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.676322 4622 factory.go:221] Registration of the systemd container factory successfully
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.677145 4622 factory.go:153] Registering CRI-O
factory Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.677167 4622 factory.go:221] Registration of the crio container factory successfully Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.677214 4622 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.677236 4622 factory.go:103] Registering Raw factory Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.677249 4622 manager.go:1196] Started watching for new ooms in manager Nov 26 11:10:40 crc kubenswrapper[4622]: E1126 11:10:40.676323 4622 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 192.168.26.254:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187b8a06c1bd79ba default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-26 11:10:40.67053817 +0000 UTC m=+0.261749682,LastTimestamp:2025-11-26 11:10:40.67053817 +0000 UTC m=+0.261749682,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.678002 4622 manager.go:319] Starting recovery of all containers Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.684608 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.684710 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.684782 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.684838 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.684888 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.684944 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.684996 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.685044 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.685093 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.685142 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.685189 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.685244 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.685961 4622 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686034 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686091 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686144 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686213 4622 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686266 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686314 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686362 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686412 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686465 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686542 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686598 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686645 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686697 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686766 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686823 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686873 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686921 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.686968 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687041 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687099 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687148 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687195 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687250 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687305 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687363 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687426 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687477 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687561 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687629 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687697 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687768 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687829 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687892 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.687944 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688002 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688059 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688110 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" 
volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688156 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688201 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688254 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688316 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688367 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688415 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688461 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688527 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688587 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688637 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688683 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688728 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688792 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688845 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688900 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.688948 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.689001 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.689051 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.689102 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.689162 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.689214 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.689298 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.689350 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.689407 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.689466 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.689532 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.689591 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.689642 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.690290 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.690375 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.690431 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.690600 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.690671 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" 
volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.690721 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.690801 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.690867 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.690932 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.690980 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.691026 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.691079 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.691145 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.691196 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.691249 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.691307 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.691358 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.691416 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692042 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692069 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692080 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692088 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692097 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692108 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692116 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692125 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692133 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" 
volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692145 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692155 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692164 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692173 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692182 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692191 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692203 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692212 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692221 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692230 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692237 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692248 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692256 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692264 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692272 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692280 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692287 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692296 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692304 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692311 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692319 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692327 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692339 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692347 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692354 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692363 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692371 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692377 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692385 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692393 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692401 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692408 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692415 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692423 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692431 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692439 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692446 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692454 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692461 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692468 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692475 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692484 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692491 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692514 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692522 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692529 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692537 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692545 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692571 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692578 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692585 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692594 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692601 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692608 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692616 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692623 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692631 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692638 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692646 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692653 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692661 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692668 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692674 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692692 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692699 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692706 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692713 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692720 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692728 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692734 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692742 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692749 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692769 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692777 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692785 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692791 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692799 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692805 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692812 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692820 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692828 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692836 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692844 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692851 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692858 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692866 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692873 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692879 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" 
volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692886 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692893 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692900 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692906 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692913 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692920 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692927 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692934 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692941 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692948 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692955 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" 
volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692962 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692970 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692978 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692985 4622 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692992 4622 reconstruct.go:97] "Volume reconstruction finished" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.692998 4622 reconciler.go:26] "Reconciler: start to sync state" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.695988 4622 manager.go:324] Recovery completed Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.702692 4622 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.704447 4622 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.704768 4622 status_manager.go:217] "Starting to sync pod status with apiserver" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.704810 4622 kubelet.go:2335] "Starting kubelet main sync loop" Nov 26 11:10:40 crc kubenswrapper[4622]: E1126 11:10:40.704865 4622 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Nov 26 11:10:40 crc kubenswrapper[4622]: W1126 11:10:40.705257 4622 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.26.254:6443: connect: connection refused Nov 26 11:10:40 crc kubenswrapper[4622]: E1126 11:10:40.705293 4622 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.26.254:6443: connect: connection refused" logger="UnhandledError" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.706366 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.707274 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.707349 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.707411 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.709385 4622 cpu_manager.go:225] "Starting CPU manager" policy="none" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.709403 4622 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.709420 4622 state_mem.go:36] "Initialized new in-memory state store" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.714967 4622 policy_none.go:49] "None policy: Start" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.715743 4622 memory_manager.go:170] "Starting memorymanager" policy="None" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.715775 4622 state_mem.go:35] "Initializing new in-memory state store" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.760466 4622 manager.go:334] "Starting Device Plugin manager" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.760529 4622 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.760540 4622 server.go:79] "Starting device plugin registration server" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.760894 4622 eviction_manager.go:189] "Eviction manager: starting control loop" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.760905 4622 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.761160 4622 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" 
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.761224 4622 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.761231 4622 plugin_manager.go:118] "Starting Kubelet Plugin Manager"
Nov 26 11:10:40 crc kubenswrapper[4622]: E1126 11:10:40.769360 4622 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.805134 4622 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"]
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.805230 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.806077 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.806100 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.806108 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.806191 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.806353 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.806383 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.806805 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.806856 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.806865 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.807008 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.807154 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.807245 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.807546 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.807579 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.807590 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.807939 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.807981 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.807994 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.808073 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.808215 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.808243 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.808267 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.808282 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.808306 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.808522 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.808538 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.808549 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.808677 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.808751 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.808781 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.808791 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.808813 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.808825 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.809321 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.809359 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.809365 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.809377 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.809384 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.809402 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.809573 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.809615 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.810185 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.810223 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.810231 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.860962 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.861608 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.861630 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.861639 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.861656 4622 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Nov 26 11:10:40 crc kubenswrapper[4622]: E1126 11:10:40.862150 4622 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.254:6443: connect: connection refused" node="crc"
Nov 26 11:10:40 crc kubenswrapper[4622]: E1126 11:10:40.877167 4622 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.254:6443: connect: connection refused" interval="400ms"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.895233 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.895261 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.895298 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.895313 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.895328 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.895341 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.895408 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.895423 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.895456 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.895470 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.895482 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.895494 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.895540 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod 
\"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.895553 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.895564 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996364 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996427 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996444 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996458 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996532 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996574 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996570 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996605 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996621 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996634 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996651 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996633 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996696 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996716 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996712 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996585 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996755 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: 
I1126 11:10:40.996719 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996834 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996815 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996875 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996900 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996926 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996942 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996957 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.996978 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.997014 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: 
\"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.997055 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.997076 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:40 crc kubenswrapper[4622]: I1126 11:10:40.997095 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.062528 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.063467 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.063496 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.063517 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.063540 4622 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 26 11:10:41 crc kubenswrapper[4622]: E1126 11:10:41.063951 4622 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.254:6443: connect: connection refused" node="crc" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.138044 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.155895 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 26 11:10:41 crc kubenswrapper[4622]: W1126 11:10:41.157008 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-d7b5f483ee3f716802e1022abd2cedf0d43a8654ce2a477098f3f6b84145b66b WatchSource:0}: Error finding container d7b5f483ee3f716802e1022abd2cedf0d43a8654ce2a477098f3f6b84145b66b: Status 404 returned error can't find the container with id d7b5f483ee3f716802e1022abd2cedf0d43a8654ce2a477098f3f6b84145b66b Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.161087 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 26 11:10:41 crc kubenswrapper[4622]: W1126 11:10:41.172316 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-a8636b15f529a2f3d4ffe93b83d4e123ba34cf514d70b68f2fcecb5e3163710e WatchSource:0}: Error finding container a8636b15f529a2f3d4ffe93b83d4e123ba34cf514d70b68f2fcecb5e3163710e: Status 404 returned error can't find the container with id a8636b15f529a2f3d4ffe93b83d4e123ba34cf514d70b68f2fcecb5e3163710e Nov 26 11:10:41 crc kubenswrapper[4622]: W1126 11:10:41.172811 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-302735b7117712bf5416e6942e8cafb7db84f1b4dfd7b9f5e1ba354a0b4fd58d WatchSource:0}: Error finding container 302735b7117712bf5416e6942e8cafb7db84f1b4dfd7b9f5e1ba354a0b4fd58d: Status 404 returned error can't find the container with id 302735b7117712bf5416e6942e8cafb7db84f1b4dfd7b9f5e1ba354a0b4fd58d Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.177761 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.182324 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:10:41 crc kubenswrapper[4622]: W1126 11:10:41.188913 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-210d8dfa1e112b1f4997c7b85ac9ab762b5c50c7c3cdd7244b725e0eabdfaa86 WatchSource:0}: Error finding container 210d8dfa1e112b1f4997c7b85ac9ab762b5c50c7c3cdd7244b725e0eabdfaa86: Status 404 returned error can't find the container with id 210d8dfa1e112b1f4997c7b85ac9ab762b5c50c7c3cdd7244b725e0eabdfaa86 Nov 26 11:10:41 crc kubenswrapper[4622]: W1126 11:10:41.191035 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-48df4c36b070c238d0fe0d79e6a9adfabdcfcb917dec62a87a80d5f15cd76014 WatchSource:0}: Error finding container 48df4c36b070c238d0fe0d79e6a9adfabdcfcb917dec62a87a80d5f15cd76014: Status 404 returned error can't find the container with id 48df4c36b070c238d0fe0d79e6a9adfabdcfcb917dec62a87a80d5f15cd76014 Nov 26 11:10:41 crc kubenswrapper[4622]: E1126 11:10:41.277716 4622 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.254:6443: connect: connection refused" interval="800ms" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.464702 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.465876 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.465908 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.465917 4622 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.465938 4622 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 26 11:10:41 crc kubenswrapper[4622]: E1126 11:10:41.466224 4622 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.254:6443: connect: connection refused" node="crc" Nov 26 11:10:41 crc kubenswrapper[4622]: W1126 11:10:41.644183 4622 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.26.254:6443: connect: connection refused Nov 26 11:10:41 crc kubenswrapper[4622]: E1126 11:10:41.644261 4622 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.26.254:6443: connect: connection refused" logger="UnhandledError" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.672240 4622 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.26.254:6443: connect: connection refused Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.674431 4622 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 20:43:48.594143653 +0000 UTC Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.709282 4622 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="60ae6ff20f7ae99518ea2c3c70d6c2f1bda51adc5d49a3fc254cbd916b385bf7" exitCode=0 Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.709333 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"60ae6ff20f7ae99518ea2c3c70d6c2f1bda51adc5d49a3fc254cbd916b385bf7"} Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.709394 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"a8636b15f529a2f3d4ffe93b83d4e123ba34cf514d70b68f2fcecb5e3163710e"} Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.709459 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.710238 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.710266 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.710275 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.710766 4622 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" 
containerID="458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a" exitCode=0 Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.710822 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a"} Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.710846 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d7b5f483ee3f716802e1022abd2cedf0d43a8654ce2a477098f3f6b84145b66b"} Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.710906 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.711467 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.711489 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.711513 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.712151 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30"} Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.712176 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"48df4c36b070c238d0fe0d79e6a9adfabdcfcb917dec62a87a80d5f15cd76014"} Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.713465 4622 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f" exitCode=0 Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.713527 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f"} Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.713576 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"210d8dfa1e112b1f4997c7b85ac9ab762b5c50c7c3cdd7244b725e0eabdfaa86"} Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.713659 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.714210 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.714236 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.714281 4622 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.714911 4622 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774" exitCode=0 Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.714929 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774"} Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.714942 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"302735b7117712bf5416e6942e8cafb7db84f1b4dfd7b9f5e1ba354a0b4fd58d"} Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.715010 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.715670 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.715690 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.715699 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.716764 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.717446 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.717471 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:41 crc kubenswrapper[4622]: I1126 11:10:41.717479 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:41 crc kubenswrapper[4622]: W1126 11:10:41.908355 4622 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.26.254:6443: connect: connection refused Nov 26 11:10:41 crc kubenswrapper[4622]: E1126 11:10:41.908443 4622 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.26.254:6443: connect: connection refused" logger="UnhandledError" Nov 26 11:10:42 crc kubenswrapper[4622]: E1126 11:10:42.078987 4622 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.254:6443: connect: connection refused" interval="1.6s" Nov 26 11:10:42 crc kubenswrapper[4622]: W1126 11:10:42.184028 4622 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: 
Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.26.254:6443: connect: connection refused Nov 26 11:10:42 crc kubenswrapper[4622]: E1126 11:10:42.184359 4622 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.26.254:6443: connect: connection refused" logger="UnhandledError" Nov 26 11:10:42 crc kubenswrapper[4622]: W1126 11:10:42.201305 4622 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.26.254:6443: connect: connection refused Nov 26 11:10:42 crc kubenswrapper[4622]: E1126 11:10:42.201376 4622 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.26.254:6443: connect: connection refused" logger="UnhandledError" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.266584 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.267563 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.267594 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.267604 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.267627 4622 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 26 11:10:42 crc kubenswrapper[4622]: E1126 11:10:42.267961 4622 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.254:6443: connect: connection refused" node="crc" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.675401 4622 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 15:29:11.132656571 +0000 UTC Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.675450 4622 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 508h18m28.457208209s for next certificate rotation Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.718636 4622 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695" exitCode=0 Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.718694 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695"} Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.718799 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:42 crc 
kubenswrapper[4622]: I1126 11:10:42.719621 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.719643 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.719652 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.720320 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"9a072884c4921828755a6b41ab196860879a82fe85dec4136dc49ae0a5f3166b"} Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.720420 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.721087 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.721118 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.721129 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.723427 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"5aa986cd95cb36fa13517fe871cfb3d701525d13c901e2cd0349b2e67b3a28d0"} Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.723459 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d72500572c691e375fc8eae060f7f2296fcae5f47b4ada9e1a6a6cb1e2eb417a"} Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.723469 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"bcfd0d3c7123cf20a72a99f6eb171bfde6b6930b0725010d05dd6c03c9838b7c"} Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.723576 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.724350 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.724378 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.724387 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.726160 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83"} Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 
11:10:42.726181 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a"} Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.726191 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc"} Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.726216 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.726826 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.726850 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.726858 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.728999 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172"} Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.729028 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb"} Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.729039 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768"} Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.729048 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d"} Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.729056 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd"} Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.729128 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.729719 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.729743 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:42 crc kubenswrapper[4622]: I1126 11:10:42.729752 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.732368 4622 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5" exitCode=0 Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.732435 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5"} Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.732452 4622 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.732480 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.732532 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.732553 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.733230 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.733255 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.733263 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.733462 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.733487 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.733495 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.733468 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.733541 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.733553 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.868048 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.868796 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.868824 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.868832 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:43 crc kubenswrapper[4622]: I1126 11:10:43.868851 4622 kubelet_node_status.go:76] "Attempting to 
register node" node="crc" Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.428719 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.620209 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.738860 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c"} Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.738898 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430"} Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.738910 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906"} Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.738918 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6"} Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.738927 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9"} Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.738920 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.738983 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.739734 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.739767 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.739776 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.739784 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.739794 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.739786 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:44 crc kubenswrapper[4622]: I1126 11:10:44.760330 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.118256 4622 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.118357 4622 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.118388 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.119744 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.119785 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.119812 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.740721 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.740742 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.741719 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.741731 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.741746 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.741756 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.741757 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.741855 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.763893 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.767776 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.966922 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.967011 4622 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.967032 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.968006 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:45 crc kubenswrapper[4622]: I1126 11:10:45.968039 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:45 crc 
kubenswrapper[4622]: I1126 11:10:45.968048 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:46 crc kubenswrapper[4622]: I1126 11:10:46.745045 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:46 crc kubenswrapper[4622]: I1126 11:10:46.745078 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:46 crc kubenswrapper[4622]: I1126 11:10:46.745974 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:46 crc kubenswrapper[4622]: I1126 11:10:46.745996 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:46 crc kubenswrapper[4622]: I1126 11:10:46.746004 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:46 crc kubenswrapper[4622]: I1126 11:10:46.746053 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:46 crc kubenswrapper[4622]: I1126 11:10:46.746071 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:46 crc kubenswrapper[4622]: I1126 11:10:46.746080 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:47 crc kubenswrapper[4622]: I1126 11:10:47.429255 4622 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 26 11:10:47 crc kubenswrapper[4622]: I1126 11:10:47.429344 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 26 11:10:47 crc kubenswrapper[4622]: I1126 11:10:47.746997 4622 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 26 11:10:47 crc kubenswrapper[4622]: I1126 11:10:47.747044 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:47 crc kubenswrapper[4622]: I1126 11:10:47.747726 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:47 crc kubenswrapper[4622]: I1126 11:10:47.747754 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:47 crc kubenswrapper[4622]: I1126 11:10:47.747762 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:48 crc kubenswrapper[4622]: I1126 11:10:48.435671 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:10:48 crc kubenswrapper[4622]: I1126 11:10:48.748610 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Nov 26 11:10:48 crc kubenswrapper[4622]: I1126 11:10:48.749297 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:48 crc kubenswrapper[4622]: I1126 11:10:48.749323 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:48 crc kubenswrapper[4622]: I1126 11:10:48.749332 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:49 crc kubenswrapper[4622]: I1126 11:10:49.946560 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 26 11:10:49 crc kubenswrapper[4622]: I1126 11:10:49.946688 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:49 crc kubenswrapper[4622]: I1126 11:10:49.947401 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:49 crc kubenswrapper[4622]: I1126 11:10:49.947483 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:49 crc kubenswrapper[4622]: I1126 11:10:49.947572 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:50 crc kubenswrapper[4622]: I1126 11:10:50.711919 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:10:50 crc kubenswrapper[4622]: I1126 11:10:50.712086 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:50 crc kubenswrapper[4622]: I1126 11:10:50.713050 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:50 crc kubenswrapper[4622]: I1126 11:10:50.713078 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:50 crc kubenswrapper[4622]: I1126 11:10:50.713087 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:50 crc kubenswrapper[4622]: E1126 11:10:50.769931 4622 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 26 11:10:50 crc kubenswrapper[4622]: I1126 11:10:50.908647 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Nov 26 11:10:50 crc kubenswrapper[4622]: I1126 11:10:50.908749 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:50 crc kubenswrapper[4622]: I1126 11:10:50.909417 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:50 crc kubenswrapper[4622]: I1126 11:10:50.909437 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:50 crc kubenswrapper[4622]: I1126 11:10:50.909445 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:52 crc kubenswrapper[4622]: I1126 11:10:52.672809 4622 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Nov 26 11:10:53 crc kubenswrapper[4622]: I1126 11:10:53.211066 4622 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Nov 26 11:10:53 crc kubenswrapper[4622]: I1126 11:10:53.211150 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Nov 26 11:10:53 crc kubenswrapper[4622]: I1126 11:10:53.214529 4622 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Nov 26 11:10:53 crc kubenswrapper[4622]: I1126 11:10:53.214554 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Nov 26 11:10:55 crc kubenswrapper[4622]: I1126 11:10:55.122953 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:10:55 crc kubenswrapper[4622]: I1126 11:10:55.123084 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:55 crc kubenswrapper[4622]: I1126 11:10:55.123987 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:55 crc kubenswrapper[4622]: I1126 11:10:55.124016 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:55 crc kubenswrapper[4622]: I1126 11:10:55.124024 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:55 crc kubenswrapper[4622]: I1126 11:10:55.125765 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:10:55 crc kubenswrapper[4622]: I1126 11:10:55.760710 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:10:55 crc kubenswrapper[4622]: I1126 11:10:55.761361 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:10:55 crc kubenswrapper[4622]: I1126 11:10:55.761395 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:10:55 crc kubenswrapper[4622]: I1126 11:10:55.761406 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:10:57 crc kubenswrapper[4622]: I1126 11:10:57.430348 4622 patch_prober.go:28] interesting pod/kube-controller-manager-crc 
container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 26 11:10:57 crc kubenswrapper[4622]: I1126 11:10:57.430410 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.205866 4622 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.206941 4622 trace.go:236] Trace[134883079]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (26-Nov-2025 11:10:44.718) (total time: 13488ms): Nov 26 11:10:58 crc kubenswrapper[4622]: Trace[134883079]: ---"Objects listed" error: 13488ms (11:10:58.206) Nov 26 11:10:58 crc kubenswrapper[4622]: Trace[134883079]: [13.488161191s] [13.488161191s] END Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.206964 4622 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.207295 4622 trace.go:236] Trace[1411711037]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (26-Nov-2025 11:10:44.427) (total time: 13779ms): Nov 26 11:10:58 crc kubenswrapper[4622]: Trace[1411711037]: ---"Objects listed" error: 13779ms (11:10:58.207) Nov 26 11:10:58 crc kubenswrapper[4622]: Trace[1411711037]: [13.779502211s] [13.779502211s] END Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.207310 4622 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.207726 4622 trace.go:236] Trace[717310285]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (26-Nov-2025 11:10:43.813) (total time: 14394ms): Nov 26 11:10:58 crc kubenswrapper[4622]: Trace[717310285]: ---"Objects listed" error: 14394ms (11:10:58.207) Nov 26 11:10:58 crc kubenswrapper[4622]: Trace[717310285]: [14.394460894s] [14.394460894s] END Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.207744 4622 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.207888 4622 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.208597 4622 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.209209 4622 trace.go:236] Trace[1380568975]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (26-Nov-2025 11:10:43.856) (total time: 14352ms): Nov 26 11:10:58 crc kubenswrapper[4622]: Trace[1380568975]: ---"Objects listed" error: 14352ms (11:10:58.209) Nov 26 
11:10:58 crc kubenswrapper[4622]: Trace[1380568975]: [14.352600951s] [14.352600951s] END Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.209224 4622 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.237761 4622 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:43972->192.168.126.11:17697: read: connection reset by peer" start-of-body= Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.237804 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:43972->192.168.126.11:17697: read: connection reset by peer" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.237773 4622 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:43982->192.168.126.11:17697: read: connection reset by peer" start-of-body= Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.237846 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:43982->192.168.126.11:17697: read: connection reset by peer" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.238083 4622 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.238117 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.439541 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.674821 4622 apiserver.go:52] "Watching apiserver" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.676352 4622 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.676743 4622 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-69txw","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"] Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.677114 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.677273 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.677443 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.677518 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.677284 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.677571 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.677617 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-69txw" Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.677633 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.677963 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.678011 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.679105 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.679263 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.679393 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.681005 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.681382 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.681444 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.682201 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.682330 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.682771 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.683015 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.683036 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.683022 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.693177 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.700657 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.710466 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.715753 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc 
kubenswrapper[4622]: I1126 11:10:58.723149 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.731669 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.738965 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.745731 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.758031 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.768919 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.770361 4622 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172" exitCode=255 Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.770387 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172"} Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.774775 4622 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.776610 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.783422 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.789555 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc 
kubenswrapper[4622]: I1126 11:10:58.796260 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.802610 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.810520 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.811666 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.811753 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.811822 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.811897 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.811965 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.812038 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.812110 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 26 11:10:58 crc 
kubenswrapper[4622]: I1126 11:10:58.811978 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.812148 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.812260 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.812335 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.812384 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.812403 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.812568 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.812511 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.812693 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.812837 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.812846 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813044 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813160 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813189 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813235 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813278 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813297 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813344 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813361 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813309 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813378 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813520 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813556 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813586 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813614 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813640 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813662 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813687 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813707 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813729 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813750 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813774 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813793 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813811 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813830 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813848 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813866 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813890 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: 
\"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813910 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813932 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813953 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814017 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814039 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814078 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814100 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814118 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814140 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814176 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814200 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814220 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814245 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814266 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814290 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814315 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814353 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814379 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814403 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 26 11:10:58 
crc kubenswrapper[4622]: I1126 11:10:58.814423 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814442 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814461 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814482 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814520 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814541 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814561 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813525 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813750 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813852 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813884 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.813983 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814221 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814316 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814595 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814795 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814863 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814886 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814960 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.815175 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.815264 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.815282 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.815455 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.815615 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.815715 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.815693 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.815742 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.815760 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.815809 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.815909 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.816056 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.816088 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.816122 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.816244 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.816293 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.816316 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.816236 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.814603 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.816796 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.816890 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.816957 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817025 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817089 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817154 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817219 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817286 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817560 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817642 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817849 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817922 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817991 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.818054 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.818115 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.818218 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.818279 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.818414 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.818538 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.818601 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.818823 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.818896 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.818956 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.819015 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.819074 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.819144 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.819209 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.819267 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.819340 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.819400 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.819463 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.819547 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.819616 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.819678 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.819738 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.819801 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.819865 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.819928 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.820001 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.820064 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.820131 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.820194 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.820257 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.820317 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.820431 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.820495 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.820590 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.820656 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.820718 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" 
(UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.820779 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.820835 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.820898 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.820958 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.821020 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.821085 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.821148 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.821211 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.821275 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.821359 
4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.822084 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.822216 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.822298 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.822394 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.822458 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.822553 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.822612 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.822688 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.823208 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 
26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.823691 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.823775 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.823939 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.824021 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.824123 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.824186 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.824244 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.824300 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.824384 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.824445 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: 
\"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.824517 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.826737 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.816569 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.816581 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.816835 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817158 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817359 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817371 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817365 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.828294 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817386 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817652 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.817705 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.818287 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.818258 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.818628 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.823980 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.824377 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.826478 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.826480 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.826550 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.826654 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.826661 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.826843 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.827017 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.828017 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.828147 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.828763 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.828871 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.828892 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.829107 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.829134 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.829375 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.829554 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.829681 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.829749 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.829812 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.829873 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.829936 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.829995 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.830226 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.830274 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.830662 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.830938 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.830965 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.830971 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.830988 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831018 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831028 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831037 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831108 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831113 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831132 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831154 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831169 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831188 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831205 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831204 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831223 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831224 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831239 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831256 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831272 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831287 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831303 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831318 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831353 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831360 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831371 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831368 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831415 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831437 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831459 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831479 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831603 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.831619 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.832291 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.832533 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.832701 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.833027 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.833142 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.833789 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834470 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834523 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834542 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834558 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834578 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 
11:10:58.834595 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834612 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834629 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834648 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834666 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834682 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834701 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834647 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834719 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834832 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834859 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834865 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834938 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.835726 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.835247 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.835644 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). 
InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.834994 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.835798 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.835822 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.835846 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.835869 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.835890 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.835905 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.835920 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.835944 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836047 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836366 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836451 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836493 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836552 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9qzd\" (UniqueName: \"kubernetes.io/projected/8a91aff9-4cb8-4cab-acef-c37fbfa011ef-kube-api-access-z9qzd\") pod \"node-resolver-69txw\" (UID: \"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\") " pod="openshift-dns/node-resolver-69txw" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836577 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836603 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: 
\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836626 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836649 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836679 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836715 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/8a91aff9-4cb8-4cab-acef-c37fbfa011ef-hosts-file\") pod \"node-resolver-69txw\" (UID: \"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\") " pod="openshift-dns/node-resolver-69txw" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836736 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836763 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836782 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836876 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836920 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836950 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836990 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837113 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837227 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837247 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837259 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837274 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837284 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837295 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837305 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837316 4622 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" 
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837342 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837353 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837367 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837380 4622 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837392 4622 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837436 4622 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837461 4622 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837478 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837490 4622 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836239 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836397 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836405 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836810 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836934 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.836949 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837362 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837362 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837629 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837675 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837752 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837549 4622 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837913 4622 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837930 4622 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837942 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837948 4622 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837970 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837981 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837998 4622 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.838013 4622 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.838026 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837928 4622 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.837955 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.838049 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:10:59.338030763 +0000 UTC m=+18.929242284 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.838059 4622 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.841631 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.841716 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.842195 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.842265 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.842559 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.842607 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.843561 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.843636 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-26 11:10:59.343621301 +0000 UTC m=+18.934832823 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.843895 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.845121 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.845092 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.845476 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.845615 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.845736 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.845921 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.845964 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.845960 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.846003 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.846306 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.846196 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.846493 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.846571 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.846679 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.846676 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.846695 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.846734 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.846776 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.846785 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.845845 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.846864 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.846996 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.847072 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.847303 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.847350 4622 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.847363 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.847351 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.847400 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.847564 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.847704 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.847798 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-26 11:10:59.347445853 +0000 UTC m=+18.938657374 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.847864 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.847916 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848029 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848172 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848263 4622 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848354 4622 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848437 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848553 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848707 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848797 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848848 4622 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848879 4622 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848902 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848915 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848926 4622 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848940 4622 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848950 4622 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848961 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848972 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848984 4622 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848993 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.848925 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849003 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: 
\"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849040 4622 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849052 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849065 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849092 4622 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849109 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849120 4622 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849130 4622 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849141 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849177 4622 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849187 4622 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849197 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849210 4622 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849220 4622 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849246 4622 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849257 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849268 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849278 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849287 4622 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849297 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849354 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849365 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849375 4622 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849401 4622 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.849419 4622 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850163 4622 scope.go:117] "RemoveContainer" containerID="9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850509 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850672 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850683 4622 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850697 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850706 4622 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850715 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850724 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850736 4622 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850746 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850755 4622 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850764 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850777 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850786 4622 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850795 4622 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\""
reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850807 4622 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850816 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850849 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850859 4622 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850871 4622 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850883 4622 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.850896 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.850920 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850899 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850959 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850973 4622 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.850990 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.850997 4622 
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.851004 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.851015 4622 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.851027 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.851714 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.852113 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.854521 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.854993 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.855036 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.855273 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.855407 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.855426 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.851049 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-26 11:10:59.351032131 +0000 UTC m=+18.942243652 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.855483 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.855581 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.855699 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.855725 4622 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.855813 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.855770 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.855854 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.855936 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.856013 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.856083 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.856192 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.856247 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.856272 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.856433 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.856635 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.856756 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.856836 4622 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:10:58 crc kubenswrapper[4622]: E1126 11:10:58.856945 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-26 11:10:59.356931264 +0000 UTC m=+18.948142786 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.856998 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.856846 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.856962 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.856813 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.856977 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.856698 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.857150 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.857232 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.857425 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.857444 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.857582 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.857759 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.857776 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.857766 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.858165 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.859703 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.859798 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.860790 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.863678 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.864155 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.864287 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.864966 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.866101 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.867405 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.867612 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.881795 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.886557 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.889584 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.895617 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960585 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9qzd\" (UniqueName: \"kubernetes.io/projected/8a91aff9-4cb8-4cab-acef-c37fbfa011ef-kube-api-access-z9qzd\") pod \"node-resolver-69txw\" (UID: \"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\") " pod="openshift-dns/node-resolver-69txw" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960631 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960648 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/8a91aff9-4cb8-4cab-acef-c37fbfa011ef-hosts-file\") pod \"node-resolver-69txw\" (UID: \"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\") " pod="openshift-dns/node-resolver-69txw" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960675 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960751 4622 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960761 4622 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960769 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960778 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960788 4622 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960796 4622 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960804 4622 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 
11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960812 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960820 4622 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960828 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960835 4622 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960843 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960851 4622 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960860 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960868 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960876 4622 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960884 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960893 4622 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960901 4622 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960909 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Nov 26 
11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960917 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960924 4622 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960931 4622 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960938 4622 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960946 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960953 4622 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960962 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960971 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960979 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960987 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.960994 4622 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961003 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961011 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961020 
4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961028 4622 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961036 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961044 4622 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961053 4622 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961061 4622 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961069 4622 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961076 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961083 4622 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961092 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961100 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961109 4622 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961117 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961124 4622 reconciler_common.go:293] "Volume 
detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961131 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961139 4622 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961145 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961153 4622 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961161 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961168 4622 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961176 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961183 4622 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961191 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961199 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961213 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961221 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961229 4622 
reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961245 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961253 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961260 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961268 4622 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961275 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961281 4622 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961288 4622 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961296 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961303 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961312 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961319 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961334 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961342 4622 
reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961352 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961359 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961367 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961375 4622 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961383 4622 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961391 4622 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961401 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961409 4622 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961416 4622 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961424 4622 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961431 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961438 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961448 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961455 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961462 4622 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961469 4622 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961478 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961486 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961494 4622 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961519 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961526 4622 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961533 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961540 4622 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961547 4622 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961592 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.961970 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" 
(UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.962000 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/8a91aff9-4cb8-4cab-acef-c37fbfa011ef-hosts-file\") pod \"node-resolver-69txw\" (UID: \"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\") " pod="openshift-dns/node-resolver-69txw" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.976844 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9qzd\" (UniqueName: \"kubernetes.io/projected/8a91aff9-4cb8-4cab-acef-c37fbfa011ef-kube-api-access-z9qzd\") pod \"node-resolver-69txw\" (UID: \"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\") " pod="openshift-dns/node-resolver-69txw" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.988157 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.993381 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 26 11:10:58 crc kubenswrapper[4622]: W1126 11:10:58.998932 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-66980c08dd245af815c32742831cd11a982f2ab9678d90247391e8a7b33494b1 WatchSource:0}: Error finding container 66980c08dd245af815c32742831cd11a982f2ab9678d90247391e8a7b33494b1: Status 404 returned error can't find the container with id 66980c08dd245af815c32742831cd11a982f2ab9678d90247391e8a7b33494b1 Nov 26 11:10:58 crc kubenswrapper[4622]: I1126 11:10:58.999682 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-69txw" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.004282 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 26 11:10:59 crc kubenswrapper[4622]: W1126 11:10:59.006284 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-7eddb458384c624b7481391aa7b046086995cef48b7e301a6acbe9ebebb60b19 WatchSource:0}: Error finding container 7eddb458384c624b7481391aa7b046086995cef48b7e301a6acbe9ebebb60b19: Status 404 returned error can't find the container with id 7eddb458384c624b7481391aa7b046086995cef48b7e301a6acbe9ebebb60b19 Nov 26 11:10:59 crc kubenswrapper[4622]: W1126 11:10:59.017382 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-825d673937957d0c041aba29ed4af246fb0498ba2ea013a1da397c5b9a797696 WatchSource:0}: Error finding container 825d673937957d0c041aba29ed4af246fb0498ba2ea013a1da397c5b9a797696: Status 404 returned error can't find the container with id 825d673937957d0c041aba29ed4af246fb0498ba2ea013a1da397c5b9a797696 Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.365982 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:10:59 crc kubenswrapper[4622]: E1126 11:10:59.366016 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:11:00.365983143 +0000 UTC m=+19.957194664 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.366149 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.366191 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.366226 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.366245 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:10:59 crc kubenswrapper[4622]: E1126 11:10:59.366390 4622 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 26 11:10:59 crc kubenswrapper[4622]: E1126 11:10:59.366429 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 26 11:10:59 crc kubenswrapper[4622]: E1126 11:10:59.366463 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:00.36644644 +0000 UTC m=+19.957657962 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Nov 26 11:10:59 crc kubenswrapper[4622]: E1126 11:10:59.366466 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Nov 26 11:10:59 crc kubenswrapper[4622]: E1126 11:10:59.366389 4622 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Nov 26 11:10:59 crc kubenswrapper[4622]: E1126 11:10:59.366489 4622 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 26 11:10:59 crc kubenswrapper[4622]: E1126 11:10:59.366528 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Nov 26 11:10:59 crc kubenswrapper[4622]: E1126 11:10:59.366559 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Nov 26 11:10:59 crc kubenswrapper[4622]: E1126 11:10:59.366578 4622 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 26 11:10:59 crc kubenswrapper[4622]: E1126 11:10:59.366537 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:00.36653081 +0000 UTC m=+19.957742332 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Nov 26 11:10:59 crc kubenswrapper[4622]: E1126 11:10:59.366634 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:00.366624147 +0000 UTC m=+19.957835669 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 26 11:10:59 crc kubenswrapper[4622]: E1126 11:10:59.366651 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:00.366643825 +0000 UTC m=+19.957855347 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.774079 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.776205 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4"}
Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.776407 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.777769 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"825d673937957d0c041aba29ed4af246fb0498ba2ea013a1da397c5b9a797696"}
Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.779423 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-69txw" event={"ID":"8a91aff9-4cb8-4cab-acef-c37fbfa011ef","Type":"ContainerStarted","Data":"beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5"}
Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.779485 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-69txw" event={"ID":"8a91aff9-4cb8-4cab-acef-c37fbfa011ef","Type":"ContainerStarted","Data":"d6166afd5185dd2a78ed942456664ac766928ba6fb49f428298948e236846546"}
Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.781441 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d"}
Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.781474 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"7eddb458384c624b7481391aa7b046086995cef48b7e301a6acbe9ebebb60b19"}
Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.783737 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a"}
Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.783774 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420"}
Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.783788 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"66980c08dd245af815c32742831cd11a982f2ab9678d90247391e8a7b33494b1"}
Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.788612 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.797591 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.805760 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.814109 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.820413 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is 
after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.828291 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.837024 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.845103 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.853870 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.862411 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.872115 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.880733 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.888215 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.898264 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.905862 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.916759 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.925902 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:10:59 crc kubenswrapper[4622]: I1126 11:10:59.936513 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:10:59Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.375452 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.375546 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.375576 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.375633 4622 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.375659 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:11:02.375621429 +0000 UTC m=+21.966832950 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.375700 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:02.375689587 +0000 UTC m=+21.966901109 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.375764 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.375808 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.375962 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.375987 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.376002 4622 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.376031 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.376046 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.376049 4622 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:02.376039422 +0000 UTC m=+21.967250943 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.376060 4622 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.376124 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:02.376108331 +0000 UTC m=+21.967319854 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.376289 4622 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.376398 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:02.376388453 +0000 UTC m=+21.967599975 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.490456 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-xf2zs"] Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.491004 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-vmw42"] Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.491144 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-k565w"] Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.491188 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.491258 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.491297 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-qx5dc"] Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.491386 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.492268 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: W1126 11:11:00.493142 4622 reflector.go:561] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": failed to list *v1.Secret: secrets "machine-config-daemon-dockercfg-r5tcq" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.493180 4622 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"machine-config-daemon-dockercfg-r5tcq\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-config-daemon-dockercfg-r5tcq\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.493220 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.493308 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Nov 26 11:11:00 crc kubenswrapper[4622]: W1126 11:11:00.493475 4622 reflector.go:561] object-"openshift-machine-config-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.493481 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.493493 4622 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 26 11:11:00 crc kubenswrapper[4622]: W1126 11:11:00.493552 4622 reflector.go:561] object-"openshift-machine-config-operator"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace 
"openshift-machine-config-operator": no relationship found between node 'crc' and this object Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.493580 4622 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.493630 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.494419 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Nov 26 11:11:00 crc kubenswrapper[4622]: W1126 11:11:00.494439 4622 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl": failed to list *v1.Secret: secrets "ovn-kubernetes-node-dockercfg-pwtwl" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.494469 4622 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-kubernetes-node-dockercfg-pwtwl\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-kubernetes-node-dockercfg-pwtwl\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 26 11:11:00 crc kubenswrapper[4622]: W1126 11:11:00.494641 4622 reflector.go:561] object-"openshift-ovn-kubernetes"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.494729 4622 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 26 11:11:00 crc kubenswrapper[4622]: W1126 11:11:00.496011 4622 reflector.go:561] object-"openshift-machine-config-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Nov 26 11:11:00 crc kubenswrapper[4622]: W1126 11:11:00.496086 4622 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": failed to list *v1.Secret: secrets "ovn-node-metrics-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object 
Nov 26 11:11:00 crc kubenswrapper[4622]: W1126 11:11:00.496111 4622 reflector.go:561] object-"openshift-ovn-kubernetes"/"env-overrides": failed to list *v1.ConfigMap: configmaps "env-overrides" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.496119 4622 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-node-metrics-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-node-metrics-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.496131 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.496137 4622 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"env-overrides\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"env-overrides\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 26 11:11:00 crc kubenswrapper[4622]: W1126 11:11:00.496033 4622 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovnkube-script-lib": failed to list *v1.ConfigMap: configmaps "ovnkube-script-lib" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.496179 4622 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovnkube-script-lib\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"ovnkube-script-lib\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 26 11:11:00 crc kubenswrapper[4622]: W1126 11:11:00.496072 4622 reflector.go:561] object-"openshift-machine-config-operator"/"proxy-tls": failed to list *v1.Secret: secrets "proxy-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.496200 4622 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"proxy-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"proxy-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 26 11:11:00 crc kubenswrapper[4622]: W1126 11:11:00.496082 4622 reflector.go:561] object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" 
in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.496219 4622 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.496271 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Nov 26 11:11:00 crc kubenswrapper[4622]: W1126 11:11:00.496367 4622 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovnkube-config": failed to list *v1.ConfigMap: configmaps "ovnkube-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.496385 4622 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovnkube-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"ovnkube-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.496528 4622 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.517140 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.533115 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-contr
oller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.549694 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.574033 4622 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.586396 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.595094 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.606279 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.615151 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.623250 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.632856 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.642244 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.653767 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.665998 4622 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\
\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.680071 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.680411 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/673ce8e8-772e-4a53-9cc4-7cd647a16d00-system-cni-dir\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " 
pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.680560 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-ovn\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.680670 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-node-log\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.680750 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49744\" (UniqueName: \"kubernetes.io/projected/9cf9b509-1f95-4119-a348-92cba5fc8bb9-kube-api-access-49744\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.680817 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-run-ovn-kubernetes\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.680895 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-etc-kubernetes\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.680970 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/673ce8e8-772e-4a53-9cc4-7cd647a16d00-cnibin\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.681047 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-openvswitch\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.681108 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-kubelet\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.681171 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-var-lib-openvswitch\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.681242 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/fc4efcee-b872-406d-a694-3572222a8dfc-multus-daemon-config\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.681364 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wxnt\" (UniqueName: \"kubernetes.io/projected/673ce8e8-772e-4a53-9cc4-7cd647a16d00-kube-api-access-9wxnt\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.681454 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-hostroot\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.681549 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-slash\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.681614 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-cni-bin\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.681674 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-cnibin\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.681765 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovnkube-config\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.681851 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-env-overrides\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.681925 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-multus-socket-dir-parent\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.681997 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-var-lib-cni-bin\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.682064 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/673ce8e8-772e-4a53-9cc4-7cd647a16d00-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.682139 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq56j\" (UniqueName: \"kubernetes.io/projected/b4b2dbdb-8e61-40e9-86ae-3dba474c215b-kube-api-access-fq56j\") pod \"machine-config-daemon-k565w\" (UID: \"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\") " pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.682205 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovn-node-metrics-cert\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.682284 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/b4b2dbdb-8e61-40e9-86ae-3dba474c215b-rootfs\") pod \"machine-config-daemon-k565w\" (UID: \"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\") " pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.682354 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovnkube-script-lib\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.682431 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/673ce8e8-772e-4a53-9cc4-7cd647a16d00-cni-binary-copy\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.682544 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-qx5dc\" (UID: 
\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.682637 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-run-k8s-cni-cncf-io\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.682702 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-var-lib-kubelet\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.682790 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-log-socket\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.682870 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b4b2dbdb-8e61-40e9-86ae-3dba474c215b-proxy-tls\") pod \"machine-config-daemon-k565w\" (UID: \"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\") " pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.682947 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-var-lib-cni-multus\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.683016 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-etc-openvswitch\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.683086 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-multus-conf-dir\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.683163 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwxwz\" (UniqueName: \"kubernetes.io/projected/fc4efcee-b872-406d-a694-3572222a8dfc-kube-api-access-lwxwz\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.683237 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: 
\"kubernetes.io/host-path/673ce8e8-772e-4a53-9cc4-7cd647a16d00-os-release\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.683303 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/673ce8e8-772e-4a53-9cc4-7cd647a16d00-tuning-conf-dir\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.683384 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-run-netns\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.683454 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-systemd\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.683550 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-os-release\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.683631 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-run-multus-certs\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.683701 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-cni-netd\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.683775 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-system-cni-dir\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.683846 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-multus-cni-dir\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.683918 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b4b2dbdb-8e61-40e9-86ae-3dba474c215b-mcd-auth-proxy-config\") pod \"machine-config-daemon-k565w\" (UID: \"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\") " pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.683996 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-systemd-units\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.684058 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-run-netns\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.684133 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/fc4efcee-b872-406d-a694-3572222a8dfc-cni-binary-copy\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.690377 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.700750 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.705669 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.705696 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.705735 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.705784 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.705844 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:00 crc kubenswrapper[4622]: E1126 11:11:00.705933 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.709290 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.710007 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.710649 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.710694 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.711580 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.712199 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.713213 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.713826 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.714827 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.715513 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.716471 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.717034 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.718157 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.718759 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.719283 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.720213 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.720786 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.721735 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.722144 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.722677 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.722868 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.725884 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.726413 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.727387 
4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.727994 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.729045 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.729535 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.730136 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.731299 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.731812 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.732739 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.733219 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.734061 4622 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.734219 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.734565 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-contr
oller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.735943 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.736452 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.737239 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.738680 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.739291 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.740213 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.740852 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.741833 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.742636 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.743522 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.743672 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.744360 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.744953 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.745413 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.745952 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.746491 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.747165 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.747628 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.748047 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.748561 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.749034 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.750300 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.751051 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.753145 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.761855 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.774008 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.784886 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/fc4efcee-b872-406d-a694-3572222a8dfc-cni-binary-copy\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.784930 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-run-netns\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.784949 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-ovn\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.784968 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-node-log\") pod \"ovnkube-node-qx5dc\" (UID: 
\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.784985 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/673ce8e8-772e-4a53-9cc4-7cd647a16d00-system-cni-dir\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785005 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-run-ovn-kubernetes\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785020 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49744\" (UniqueName: \"kubernetes.io/projected/9cf9b509-1f95-4119-a348-92cba5fc8bb9-kube-api-access-49744\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785036 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-openvswitch\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785051 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-etc-kubernetes\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785065 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/673ce8e8-772e-4a53-9cc4-7cd647a16d00-cnibin\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785087 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-kubelet\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785102 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-var-lib-openvswitch\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785119 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/fc4efcee-b872-406d-a694-3572222a8dfc-multus-daemon-config\") pod \"multus-vmw42\" (UID: 
\"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785135 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wxnt\" (UniqueName: \"kubernetes.io/projected/673ce8e8-772e-4a53-9cc4-7cd647a16d00-kube-api-access-9wxnt\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785153 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-slash\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785169 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-cni-bin\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785183 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-hostroot\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785199 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovnkube-config\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785213 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-env-overrides\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785228 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-cnibin\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785243 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq56j\" (UniqueName: \"kubernetes.io/projected/b4b2dbdb-8e61-40e9-86ae-3dba474c215b-kube-api-access-fq56j\") pod \"machine-config-daemon-k565w\" (UID: \"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\") " pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785258 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovn-node-metrics-cert\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 
11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785274 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-multus-socket-dir-parent\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785292 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-var-lib-cni-bin\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785548 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-var-lib-openvswitch\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785639 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-slash\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785665 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-cni-bin\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785719 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-hostroot\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785932 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-run-ovn-kubernetes\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785963 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-run-netns\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.785989 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-ovn\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786010 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: 
\"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-node-log\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786032 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/673ce8e8-772e-4a53-9cc4-7cd647a16d00-system-cni-dir\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786056 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-etc-kubernetes\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786135 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/fc4efcee-b872-406d-a694-3572222a8dfc-cni-binary-copy\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786176 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/673ce8e8-772e-4a53-9cc4-7cd647a16d00-cnibin\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786183 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-openvswitch\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786200 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-kubelet\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786217 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/fc4efcee-b872-406d-a694-3572222a8dfc-multus-daemon-config\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786279 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-multus-socket-dir-parent\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786319 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-cnibin\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 
crc kubenswrapper[4622]: I1126 11:11:00.786363 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/673ce8e8-772e-4a53-9cc4-7cd647a16d00-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786401 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/b4b2dbdb-8e61-40e9-86ae-3dba474c215b-rootfs\") pod \"machine-config-daemon-k565w\" (UID: \"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\") " pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786419 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovnkube-script-lib\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786438 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/673ce8e8-772e-4a53-9cc4-7cd647a16d00-cni-binary-copy\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786472 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786489 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-run-k8s-cni-cncf-io\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786555 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/b4b2dbdb-8e61-40e9-86ae-3dba474c215b-rootfs\") pod \"machine-config-daemon-k565w\" (UID: \"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\") " pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786579 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-var-lib-cni-bin\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786794 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-qx5dc\" (UID: 
\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.786917 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-run-k8s-cni-cncf-io\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787106 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/673ce8e8-772e-4a53-9cc4-7cd647a16d00-cni-binary-copy\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787110 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/673ce8e8-772e-4a53-9cc4-7cd647a16d00-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787137 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-var-lib-kubelet\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787158 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b4b2dbdb-8e61-40e9-86ae-3dba474c215b-proxy-tls\") pod \"machine-config-daemon-k565w\" (UID: \"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\") " pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787172 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-log-socket\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787188 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-var-lib-cni-multus\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787198 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-log-socket\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787206 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-run-netns\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787171 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-var-lib-kubelet\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787228 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-systemd\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787238 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-run-netns\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787244 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-etc-openvswitch\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787252 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-systemd\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787260 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-multus-conf-dir\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787244 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-var-lib-cni-multus\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787269 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-etc-openvswitch\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787293 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwxwz\" (UniqueName: \"kubernetes.io/projected/fc4efcee-b872-406d-a694-3572222a8dfc-kube-api-access-lwxwz\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787243 4622 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc
/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787308 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-multus-conf-dir\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787315 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/673ce8e8-772e-4a53-9cc4-7cd647a16d00-os-release\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787466 4622 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/673ce8e8-772e-4a53-9cc4-7cd647a16d00-tuning-conf-dir\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787487 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-cni-netd\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787522 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-system-cni-dir\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787538 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-os-release\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787554 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-run-multus-certs\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787571 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b4b2dbdb-8e61-40e9-86ae-3dba474c215b-mcd-auth-proxy-config\") pod \"machine-config-daemon-k565w\" (UID: \"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\") " pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787595 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-system-cni-dir\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787605 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-systemd-units\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787621 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-multus-cni-dir\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787626 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-cni-netd\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787655 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/673ce8e8-772e-4a53-9cc4-7cd647a16d00-os-release\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787689 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-systemd-units\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787720 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-host-run-multus-certs\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787731 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-os-release\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.787732 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fc4efcee-b872-406d-a694-3572222a8dfc-multus-cni-dir\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.788074 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/673ce8e8-772e-4a53-9cc4-7cd647a16d00-tuning-conf-dir\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.800873 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.801050 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wxnt\" (UniqueName: \"kubernetes.io/projected/673ce8e8-772e-4a53-9cc4-7cd647a16d00-kube-api-access-9wxnt\") pod \"multus-additional-cni-plugins-xf2zs\" (UID: \"673ce8e8-772e-4a53-9cc4-7cd647a16d00\") " pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.801054 
4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwxwz\" (UniqueName: \"kubernetes.io/projected/fc4efcee-b872-406d-a694-3572222a8dfc-kube-api-access-lwxwz\") pod \"multus-vmw42\" (UID: \"fc4efcee-b872-406d-a694-3572222a8dfc\") " pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.803077 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.811851 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: W1126 11:11:00.812918 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod673ce8e8_772e_4a53_9cc4_7cd647a16d00.slice/crio-a9aa1400d04f92a44a1cd8448e0aa91fb5647712c71487ff7144a3dbd1fe1572 WatchSource:0}: Error finding container a9aa1400d04f92a44a1cd8448e0aa91fb5647712c71487ff7144a3dbd1fe1572: Status 404 returned error can't find the container with id a9aa1400d04f92a44a1cd8448e0aa91fb5647712c71487ff7144a3dbd1fe1572 Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.818949 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-vmw42" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.824084 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.838629 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.848179 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.857987 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.867307 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.876779 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.887055 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.896978 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.904653 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.917484 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.926162 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.932983 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.936098 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.940895 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.942741 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.947014 4622 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\
\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.961647 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.971172 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.980848 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.990147 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:00 crc kubenswrapper[4622]: I1126 11:11:00.999156 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.008357 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-contr
oller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.016367 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.023290 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.033008 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.040695 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.050986 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.059296 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.080696 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.122120 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.164598 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volume
Mounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/opensh
ift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.205742 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"s
tartedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492f
a43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.241987 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-contr
oller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.280474 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.322275 4622 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.361012 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.400007 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.408556 4622 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.409596 4622 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.409629 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.409638 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.409690 4622 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.463408 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-
o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.474201 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.481409 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b4b2dbdb-8e61-40e9-86ae-3dba474c215b-proxy-tls\") pod \"machine-config-daemon-k565w\" (UID: \"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\") " pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.495956 4622 kubelet_node_status.go:115] "Node was previously registered" node="crc" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.496149 4622 kubelet_node_status.go:79] "Successfully 
registered node" node="crc"
Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.497085 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.497116 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.497126 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.497140 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.497149 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:01Z","lastTransitionTime":"2025-11-26T11:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.514416 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.514645 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.517084 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory"
Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.517103 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.517111 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.517122 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.517130 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:01Z","lastTransitionTime":"2025-11-26T11:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.525151 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.527278 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.527306 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.527315 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.527325 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.527332 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:01Z","lastTransitionTime":"2025-11-26T11:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.537692 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.540632 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.540672 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.540682 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.540699 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.540711 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:01Z","lastTransitionTime":"2025-11-26T11:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.548740 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.550808 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.550838 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.550848 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.550863 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.550874 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:01Z","lastTransitionTime":"2025-11-26T11:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.554579 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.556994 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovnkube-config\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.559606 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.559711 4622 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.560923 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.561039 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.561102 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.561167 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.561231 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:01Z","lastTransitionTime":"2025-11-26T11:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.579703 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.594220 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.599334 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b4b2dbdb-8e61-40e9-86ae-3dba474c215b-mcd-auth-proxy-config\") pod \"machine-config-daemon-k565w\" (UID: \"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\") " pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.635108 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.662117 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.663665 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.663699 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.663710 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.663727 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.663741 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:01Z","lastTransitionTime":"2025-11-26T11:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.701849 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.750038 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.766058 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.766100 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.766113 4622 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.766132 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.766145 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:01Z","lastTransitionTime":"2025-11-26T11:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.783222 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.786905 4622 configmap.go:193] Couldn't get configMap openshift-ovn-kubernetes/env-overrides: failed to sync configmap cache: timed out waiting for the condition Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.786919 4622 secret.go:188] Couldn't get secret openshift-ovn-kubernetes/ovn-node-metrics-cert: failed to sync secret cache: timed out waiting for the condition Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.786989 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-env-overrides podName:9cf9b509-1f95-4119-a348-92cba5fc8bb9 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:02.286967559 +0000 UTC m=+21.878179080 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "env-overrides" (UniqueName: "kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-env-overrides") pod "ovnkube-node-qx5dc" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9") : failed to sync configmap cache: timed out waiting for the condition Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.787008 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovn-node-metrics-cert podName:9cf9b509-1f95-4119-a348-92cba5fc8bb9 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:02.287001723 +0000 UTC m=+21.878213246 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovn-node-metrics-cert" (UniqueName: "kubernetes.io/secret/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovn-node-metrics-cert") pod "ovnkube-node-qx5dc" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9") : failed to sync secret cache: timed out waiting for the condition Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.787449 4622 configmap.go:193] Couldn't get configMap openshift-ovn-kubernetes/ovnkube-script-lib: failed to sync configmap cache: timed out waiting for the condition Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.787560 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovnkube-script-lib podName:9cf9b509-1f95-4119-a348-92cba5fc8bb9 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:02.287541837 +0000 UTC m=+21.878753359 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "ovnkube-script-lib" (UniqueName: "kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovnkube-script-lib") pod "ovnkube-node-qx5dc" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9") : failed to sync configmap cache: timed out waiting for the condition Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.791720 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5"} Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.792859 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vmw42" event={"ID":"fc4efcee-b872-406d-a694-3572222a8dfc","Type":"ContainerStarted","Data":"4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c"} Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.792913 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vmw42" event={"ID":"fc4efcee-b872-406d-a694-3572222a8dfc","Type":"ContainerStarted","Data":"f04d0ba2509734dc42e75c86e091d972bde8c6c53387e4bf60673057df04495b"} Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.795008 4622 generic.go:334] "Generic (PLEG): container finished" podID="673ce8e8-772e-4a53-9cc4-7cd647a16d00" containerID="30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f" exitCode=0 Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.795093 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" event={"ID":"673ce8e8-772e-4a53-9cc4-7cd647a16d00","Type":"ContainerDied","Data":"30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f"} Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.795133 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" event={"ID":"673ce8e8-772e-4a53-9cc4-7cd647a16d00","Type":"ContainerStarted","Data":"a9aa1400d04f92a44a1cd8448e0aa91fb5647712c71487ff7144a3dbd1fe1572"} Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.796616 4622 projected.go:288] Couldn't get configMap openshift-machine-config-operator/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.796651 4622 projected.go:194] Error preparing data for projected volume kube-api-access-fq56j for pod openshift-machine-config-operator/machine-config-daemon-k565w: failed to sync configmap cache: timed out waiting for the condition Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.796702 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b4b2dbdb-8e61-40e9-86ae-3dba474c215b-kube-api-access-fq56j podName:b4b2dbdb-8e61-40e9-86ae-3dba474c215b nodeName:}" failed. No retries permitted until 2025-11-26 11:11:02.296684991 +0000 UTC m=+21.887896514 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-fq56j" (UniqueName: "kubernetes.io/projected/b4b2dbdb-8e61-40e9-86ae-3dba474c215b-kube-api-access-fq56j") pod "machine-config-daemon-k565w" (UID: "b4b2dbdb-8e61-40e9-86ae-3dba474c215b") : failed to sync configmap cache: timed out waiting for the condition Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.797231 4622 projected.go:288] Couldn't get configMap openshift-ovn-kubernetes/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.797291 4622 projected.go:194] Error preparing data for projected volume kube-api-access-49744 for pod openshift-ovn-kubernetes/ovnkube-node-qx5dc: failed to sync configmap cache: timed out waiting for the condition Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.797385 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9cf9b509-1f95-4119-a348-92cba5fc8bb9-kube-api-access-49744 podName:9cf9b509-1f95-4119-a348-92cba5fc8bb9 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:02.297363428 +0000 UTC m=+21.888574949 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-49744" (UniqueName: "kubernetes.io/projected/9cf9b509-1f95-4119-a348-92cba5fc8bb9-kube-api-access-49744") pod "ovnkube-node-qx5dc" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9") : failed to sync configmap cache: timed out waiting for the condition Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.808222 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o:
//30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac
774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: E1126 11:11:01.818909 4622 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"etcd-crc\" already exists" pod="openshift-etcd/etcd-crc" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.861403 4622 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.868079 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.868117 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.868128 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.868142 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.868152 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:01Z","lastTransitionTime":"2025-11-26T11:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.874813 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.914561 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.940664 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:01Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.954325 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.970915 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.970950 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.970958 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.970973 4622 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeNotReady" Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.970982 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:01Z","lastTransitionTime":"2025-11-26T11:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:01 crc kubenswrapper[4622]: I1126 11:11:01.993900 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.013961 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.043131 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.073443 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.073477 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.073485 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.073515 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.073528 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:02Z","lastTransitionTime":"2025-11-26T11:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.081317 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.094551 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.141468 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.176182 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.176238 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.176250 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.176272 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.176287 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:02Z","lastTransitionTime":"2025-11-26T11:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.182731 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.221121 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.261829 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.278666 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.278709 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.278721 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.278741 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.278756 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:02Z","lastTransitionTime":"2025-11-26T11:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.298776 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49744\" (UniqueName: \"kubernetes.io/projected/9cf9b509-1f95-4119-a348-92cba5fc8bb9-kube-api-access-49744\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.298838 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-env-overrides\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.298870 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq56j\" (UniqueName: \"kubernetes.io/projected/b4b2dbdb-8e61-40e9-86ae-3dba474c215b-kube-api-access-fq56j\") pod \"machine-config-daemon-k565w\" (UID: \"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\") " pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.298891 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovn-node-metrics-cert\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.298933 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovnkube-script-lib\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.299559 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-env-overrides\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.299660 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovnkube-script-lib\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.301799 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.302017 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovn-node-metrics-cert\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.302095 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq56j\" (UniqueName: \"kubernetes.io/projected/b4b2dbdb-8e61-40e9-86ae-3dba474c215b-kube-api-access-fq56j\") pod \"machine-config-daemon-k565w\" (UID: \"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\") " pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.302475 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49744\" (UniqueName: \"kubernetes.io/projected/9cf9b509-1f95-4119-a348-92cba5fc8bb9-kube-api-access-49744\") pod \"ovnkube-node-qx5dc\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.310306 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.333053 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.345441 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 
2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: W1126 11:11:02.345861 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9cf9b509_1f95_4119_a348_92cba5fc8bb9.slice/crio-52ed3b80dc23320df26ea32271abb5917b693e789c175fd038a2489b30471874 WatchSource:0}: Error finding container 52ed3b80dc23320df26ea32271abb5917b693e789c175fd038a2489b30471874: Status 404 returned error can't find the container with id 52ed3b80dc23320df26ea32271abb5917b693e789c175fd038a2489b30471874 Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.381644 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.381680 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.381690 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.381703 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.381725 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:02Z","lastTransitionTime":"2025-11-26T11:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.381848 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.399190 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.399331 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:11:06.399315887 +0000 UTC m=+25.990527409 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.399472 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.399527 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.399552 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.399572 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.399645 4622 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.399683 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:06.399673244 +0000 UTC m=+25.990884766 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.399719 4622 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.399739 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:06.399734261 +0000 UTC m=+25.990945783 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.399785 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.399815 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.399827 4622 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.399868 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:06.399853746 +0000 UTC m=+25.991065268 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.399791 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.399891 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.399901 4622 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.399922 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:06.399916044 +0000 UTC m=+25.991127567 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.421874 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready 
status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.464877 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.483752 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.483810 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.483820 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.483843 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.483855 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:02Z","lastTransitionTime":"2025-11-26T11:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.502693 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.543036 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.583561 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.586590 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.586708 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.586772 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.586838 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.586889 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:02Z","lastTransitionTime":"2025-11-26T11:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.630413 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.664375 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4
fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.689800 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.689912 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.689972 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.690067 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.690130 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:02Z","lastTransitionTime":"2025-11-26T11:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.700764 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"
restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.705007 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.705070 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.705251 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.705300 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.705097 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:02 crc kubenswrapper[4622]: E1126 11:11:02.705453 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.740940 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.781170 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.792417 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.792444 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.792453 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.792466 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.792474 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:02Z","lastTransitionTime":"2025-11-26T11:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.799376 4622 generic.go:334] "Generic (PLEG): container finished" podID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerID="93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf" exitCode=0 Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.799450 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerDied","Data":"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf"} Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.799487 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerStarted","Data":"52ed3b80dc23320df26ea32271abb5917b693e789c175fd038a2489b30471874"} Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.801521 4622 generic.go:334] "Generic (PLEG): container finished" podID="673ce8e8-772e-4a53-9cc4-7cd647a16d00" containerID="9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4" exitCode=0 Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.801567 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" event={"ID":"673ce8e8-772e-4a53-9cc4-7cd647a16d00","Type":"ContainerDied","Data":"9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4"} Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.803879 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c"} Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.803904 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4"} Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.803915 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"cf082a4040a1c0d348f549e9ac601b7d78de080b897b2678394fe511236c8ea4"} Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.822206 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.860849 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.894026 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.894050 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.894058 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.894070 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.894078 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:02Z","lastTransitionTime":"2025-11-26T11:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.903377 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.939854 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.981061 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:02Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.996544 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.996568 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.996576 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.996589 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:02 crc kubenswrapper[4622]: I1126 11:11:02.996598 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:02Z","lastTransitionTime":"2025-11-26T11:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.020163 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.061767 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status:
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.099243 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.099265 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.099273 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.099285 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.099293 4622 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:03Z","lastTransitionTime":"2025-11-26T11:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.101089 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.141306 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.181419 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.201098 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.201123 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.201131 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.201142 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.201150 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:03Z","lastTransitionTime":"2025-11-26T11:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.221136 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.260020 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.300357 
4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.302755 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.302790 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.302800 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.302815 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.302825 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:03Z","lastTransitionTime":"2025-11-26T11:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns 
error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.346352 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\"
,\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.384728 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z 
is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.405757 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.405789 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.405799 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.405812 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.405820 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:03Z","lastTransitionTime":"2025-11-26T11:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.428711 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731c
a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.462748 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.503026 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.507390 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.507434 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.507445 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.507463 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.507475 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:03Z","lastTransitionTime":"2025-11-26T11:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.540723 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.580997 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.609524 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.609559 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.609569 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.609583 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.609592 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:03Z","lastTransitionTime":"2025-11-26T11:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.712408 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.712438 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.712446 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.712462 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.712471 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:03Z","lastTransitionTime":"2025-11-26T11:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.808811 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerStarted","Data":"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.808852 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerStarted","Data":"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.808862 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerStarted","Data":"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.808885 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerStarted","Data":"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.808894 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerStarted","Data":"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.808903 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerStarted","Data":"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.810938 4622 generic.go:334] "Generic (PLEG): container finished" podID="673ce8e8-772e-4a53-9cc4-7cd647a16d00" containerID="a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513" exitCode=0 Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.810967 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-additional-cni-plugins-xf2zs" event={"ID":"673ce8e8-772e-4a53-9cc4-7cd647a16d00","Type":"ContainerDied","Data":"a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.813541 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.813578 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.813590 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.813604 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.813613 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:03Z","lastTransitionTime":"2025-11-26T11:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.821890 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.833399 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.847820 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z 
is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.862566 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.873235 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.884617 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.895605 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.907480 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.916620 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.916667 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.916682 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.916703 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.916715 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:03Z","lastTransitionTime":"2025-11-26T11:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.944038 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:03 crc kubenswrapper[4622]: I1126 11:11:03.982275 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.019725 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.019773 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.019784 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.019815 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.019826 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:04Z","lastTransitionTime":"2025-11-26T11:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.023061 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.062024 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.101789 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.121920 4622 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.121949 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.121958 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.121974 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.121987 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:04Z","lastTransitionTime":"2025-11-26T11:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.141044 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.224397 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.224450 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.224461 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.224476 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.224486 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:04Z","lastTransitionTime":"2025-11-26T11:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.326651 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.326691 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.326701 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.326713 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.326721 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:04Z","lastTransitionTime":"2025-11-26T11:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.429138 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.429206 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.429223 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.429248 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.429260 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:04Z","lastTransitionTime":"2025-11-26T11:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.433585 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.437449 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.452791 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4
fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.467819 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",
\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.480286 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.490165 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.500877 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.513343 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.524277 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.531903 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.531946 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.531957 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.531978 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.531990 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:04Z","lastTransitionTime":"2025-11-26T11:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.532381 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.544795 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.553285 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.581941 4622 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.620445 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.634468 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.634520 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.634531 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.634547 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.634556 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:04Z","lastTransitionTime":"2025-11-26T11:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.662847 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.704099 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.705253 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:04 crc kubenswrapper[4622]: E1126 11:11:04.705385 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.705486 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:04 crc kubenswrapper[4622]: E1126 11:11:04.705736 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.705793 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:04 crc kubenswrapper[4622]: E1126 11:11:04.705849 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.736722 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.736757 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.736770 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.736787 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.736798 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:04Z","lastTransitionTime":"2025-11-26T11:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.740626 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.779642 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.817153 4622 generic.go:334] "Generic (PLEG): container finished" podID="673ce8e8-772e-4a53-9cc4-7cd647a16d00" containerID="159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3" exitCode=0 Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.817236 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" event={"ID":"673ce8e8-772e-4a53-9cc4-7cd647a16d00","Type":"ContainerDied","Data":"159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3"} Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.823430 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.838961 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.838993 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.839005 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.839017 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.839026 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:04Z","lastTransitionTime":"2025-11-26T11:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.860625 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.902353 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.941207 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.942032 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.942105 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.942124 4622 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.942168 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.942186 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:04Z","lastTransitionTime":"2025-11-26T11:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:04 crc kubenswrapper[4622]: I1126 11:11:04.983328 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:04Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.026647 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z 
is after 2025-08-24T17:21:41Z"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.044471 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.044520 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.044589 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.044603 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.044618 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:05Z","lastTransitionTime":"2025-11-26T11:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.061020 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.102071 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.140546 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.150447 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.150702 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.150713 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.150729 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.150738 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:05Z","lastTransitionTime":"2025-11-26T11:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.181161 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.226065 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.253307 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.253354 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.253385 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.253400 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.253410 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:05Z","lastTransitionTime":"2025-11-26T11:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.262157 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.302288 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.342353 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.354893 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.354920 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.354932 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.354945 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.354954 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:05Z","lastTransitionTime":"2025-11-26T11:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.384112 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.426288 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.457193 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.457229 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.457242 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.457257 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.457267 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:05Z","lastTransitionTime":"2025-11-26T11:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.462342 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert
-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.500690 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.551810 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.559991 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.560026 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.560037 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.560051 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.560062 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:05Z","lastTransitionTime":"2025-11-26T11:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.585447 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.622438 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.662077 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.662126 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.662136 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.662157 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.662170 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:05Z","lastTransitionTime":"2025-11-26T11:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.669686 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.703109 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.743476 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.764796 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.764837 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.764848 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.764861 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.764870 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:05Z","lastTransitionTime":"2025-11-26T11:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.784343 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.826755 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerStarted","Data":"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835"} Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.826847 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.829621 4622 generic.go:334] "Generic (PLEG): container finished" podID="673ce8e8-772e-4a53-9cc4-7cd647a16d00" containerID="6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248" exitCode=0 Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.829690 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" 
event={"ID":"673ce8e8-772e-4a53-9cc4-7cd647a16d00","Type":"ContainerDied","Data":"6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248"} Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.861967 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.866401 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.866423 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.866431 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.866446 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.866456 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:05Z","lastTransitionTime":"2025-11-26T11:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.900534 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.945965 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.970912 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.970959 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.970971 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.970989 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.971009 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:05Z","lastTransitionTime":"2025-11-26T11:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:05 crc kubenswrapper[4622]: I1126 11:11:05.986784 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c79
92ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:05Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.022287 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.061689 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.074174 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.074220 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.074240 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.074286 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.074309 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:06Z","lastTransitionTime":"2025-11-26T11:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.099704 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11
:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.141397 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.176518 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.176567 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.176579 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.176598 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.176610 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:06Z","lastTransitionTime":"2025-11-26T11:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.183710 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.224118 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.259830 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.279063 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.279093 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.279102 4622 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.279115 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.279124 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:06Z","lastTransitionTime":"2025-11-26T11:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.302582 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.344553 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z 
is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.381300 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.381709 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.381738 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.381747 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.381760 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.381770 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:06Z","lastTransitionTime":"2025-11-26T11:11:06Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.426946 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.427006 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.427030 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.427045 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.427074 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.427174 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.427193 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.427203 4622 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.427234 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-11-26 11:11:14.42722303 +0000 UTC m=+34.018434552 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.427521 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:11:14.427492137 +0000 UTC m=+34.018703659 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.427569 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.427578 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.427585 4622 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.427605 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:14.42759964 +0000 UTC m=+34.018811162 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.427642 4622 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.427660 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-11-26 11:11:14.427654584 +0000 UTC m=+34.018866106 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.427694 4622 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.427710 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:14.42770574 +0000 UTC m=+34.018917262 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.483524 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.483561 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.483570 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.483585 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.483593 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:06Z","lastTransitionTime":"2025-11-26T11:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.585350 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.585606 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.585616 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.585630 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.585638 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:06Z","lastTransitionTime":"2025-11-26T11:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.687634 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.687665 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.687673 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.687686 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.687693 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:06Z","lastTransitionTime":"2025-11-26T11:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.705079 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.705106 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.705128 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.705154 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.705253 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:06 crc kubenswrapper[4622]: E1126 11:11:06.705329 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.789552 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.789589 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.789599 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.789610 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.789619 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:06Z","lastTransitionTime":"2025-11-26T11:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.835462 4622 generic.go:334] "Generic (PLEG): container finished" podID="673ce8e8-772e-4a53-9cc4-7cd647a16d00" containerID="13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd" exitCode=0 Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.835510 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" event={"ID":"673ce8e8-772e-4a53-9cc4-7cd647a16d00","Type":"ContainerDied","Data":"13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd"} Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.849428 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-conf
ig\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.870678 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCoun
t\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"c
ontainerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.886280 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.895445 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.895476 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.895486 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.895524 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.895536 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:06Z","lastTransitionTime":"2025-11-26T11:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.897414 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.907052 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.916110 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.924319 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.937791 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.947318 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.956213 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.965738 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.974449 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.984642 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0
a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.997223 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:06Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.997559 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.997591 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.997600 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.997616 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:06 crc kubenswrapper[4622]: I1126 11:11:06.997624 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:06Z","lastTransitionTime":"2025-11-26T11:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.099720 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.099753 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.099764 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.099777 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.099785 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:07Z","lastTransitionTime":"2025-11-26T11:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.202395 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.202460 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.202474 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.202514 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.202529 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:07Z","lastTransitionTime":"2025-11-26T11:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.306011 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.306057 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.306067 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.306088 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.306099 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:07Z","lastTransitionTime":"2025-11-26T11:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.408001 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.408035 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.408044 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.408058 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.408066 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:07Z","lastTransitionTime":"2025-11-26T11:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.510667 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.510700 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.510711 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.510725 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.510734 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:07Z","lastTransitionTime":"2025-11-26T11:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.612985 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.613018 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.613027 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.613040 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.613052 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:07Z","lastTransitionTime":"2025-11-26T11:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.714793 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.714829 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.714839 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.714853 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.714862 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:07Z","lastTransitionTime":"2025-11-26T11:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.817841 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.818080 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.818093 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.818110 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.818119 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:07Z","lastTransitionTime":"2025-11-26T11:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.843929 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerStarted","Data":"097106f2d49669b73dfa274224904a5f5f41cc3bed462f8250904376fbbbeaba"} Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.844275 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.848730 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" event={"ID":"673ce8e8-772e-4a53-9cc4-7cd647a16d00","Type":"ContainerStarted","Data":"0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b"} Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.855859 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:07Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.864715 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.867638 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:07Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.883316 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097106f2d49669b73dfa274224904a5f5f41cc3b
ed462f8250904376fbbbeaba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:07Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.897360 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4
fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:07Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.907743 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:07Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.920273 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:07Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.921028 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.921070 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.921080 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.921101 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.921112 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:07Z","lastTransitionTime":"2025-11-26T11:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.929379 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:07Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.939413 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:07Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.948386 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:07Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.959547 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:07Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.967737 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:07Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.974890 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:07Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.984735 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:07Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:07 crc kubenswrapper[4622]: I1126 11:11:07.994733 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:07Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.012184 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9b
e8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.023147 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.024148 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.024194 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.024205 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.024220 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.024233 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:08Z","lastTransitionTime":"2025-11-26T11:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.032562 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.041351 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.054404 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.067482 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.078659 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.086723 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.097987 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.108296 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.118269 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.126487 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.126572 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.126585 4622 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.126610 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.126625 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:08Z","lastTransitionTime":"2025-11-26T11:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.128706 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.141485 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.155585 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097106f2d49669b73dfa274224904a5f5f41cc3b
ed462f8250904376fbbbeaba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.229858 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.229923 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.229939 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.229964 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.229975 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:08Z","lastTransitionTime":"2025-11-26T11:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.332154 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.332377 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.332466 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.332557 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.332613 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:08Z","lastTransitionTime":"2025-11-26T11:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.436033 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.436117 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.436132 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.436159 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.436173 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:08Z","lastTransitionTime":"2025-11-26T11:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.539136 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.539200 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.539212 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.539236 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.539250 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:08Z","lastTransitionTime":"2025-11-26T11:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.641347 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.641405 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.641415 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.641434 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.641449 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:08Z","lastTransitionTime":"2025-11-26T11:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.706072 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.706135 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:08 crc kubenswrapper[4622]: E1126 11:11:08.706188 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.706149 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:08 crc kubenswrapper[4622]: E1126 11:11:08.706278 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:08 crc kubenswrapper[4622]: E1126 11:11:08.706407 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.743551 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.743579 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.743587 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.743603 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.743614 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:08Z","lastTransitionTime":"2025-11-26T11:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.846178 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.846212 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.846224 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.846240 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.846251 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:08Z","lastTransitionTime":"2025-11-26T11:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.851465 4622 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.851906 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.877028 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.891061 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.900831 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.911382 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.925442 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097106f2d49669b73dfa274224904a5f5f41cc3bed462f8250904376fbbbeaba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.938485 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4
fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.947180 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.948372 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.948470 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.948565 4622 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.948637 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.948700 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:08Z","lastTransitionTime":"2025-11-26T11:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.955687 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.963685 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.971621 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.980102 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.987826 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:08 crc kubenswrapper[4622]: I1126 11:11:08.994811 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:08Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.002394 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:09Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.009375 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:09Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.051556 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.051596 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.051607 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.051628 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.051640 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:09Z","lastTransitionTime":"2025-11-26T11:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.154313 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.154348 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.154359 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.154388 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.154400 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:09Z","lastTransitionTime":"2025-11-26T11:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.256074 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.256110 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.256119 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.256134 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.256145 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:09Z","lastTransitionTime":"2025-11-26T11:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.358115 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.358405 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.358494 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.358608 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.358669 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:09Z","lastTransitionTime":"2025-11-26T11:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.461066 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.461116 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.461128 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.461149 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.461159 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:09Z","lastTransitionTime":"2025-11-26T11:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.563738 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.564058 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.564068 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.564084 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.564094 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:09Z","lastTransitionTime":"2025-11-26T11:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.666011 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.666055 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.666067 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.666084 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.666095 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:09Z","lastTransitionTime":"2025-11-26T11:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.771180 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.771237 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.771255 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.771275 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.771288 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:09Z","lastTransitionTime":"2025-11-26T11:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.858462 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovnkube-controller/0.log" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.861320 4622 generic.go:334] "Generic (PLEG): container finished" podID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerID="097106f2d49669b73dfa274224904a5f5f41cc3bed462f8250904376fbbbeaba" exitCode=1 Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.861398 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerDied","Data":"097106f2d49669b73dfa274224904a5f5f41cc3bed462f8250904376fbbbeaba"} Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.862156 4622 scope.go:117] "RemoveContainer" containerID="097106f2d49669b73dfa274224904a5f5f41cc3bed462f8250904376fbbbeaba" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.874360 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.874404 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.874415 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.874432 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.874442 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:09Z","lastTransitionTime":"2025-11-26T11:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.876005 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:09Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.886777 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:09Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.899018 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:09Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.913597 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097106f2d49669b73dfa274224904a5f5f41cc3bed462f8250904376fbbbeaba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097106f2d49669b73dfa274224904a5f5f41cc3bed462f8250904376fbbbeaba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:09Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1126 11:11:09.767153 5917 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1126 11:11:09.767533 5917 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1126 11:11:09.770146 5917 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1126 11:11:09.770165 5917 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1126 11:11:09.770180 5917 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1126 11:11:09.770191 5917 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1126 11:11:09.770197 5917 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1126 11:11:09.770208 5917 factory.go:656] Stopping watch factory\\\\nI1126 11:11:09.770229 5917 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1126 11:11:09.770243 5917 handler.go:208] Removed *v1.Node event handler 2\\\\nI1126 11:11:09.770250 5917 handler.go:208] Removed *v1.Node event handler 7\\\\nI1126 11:11:09.770256 5917 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1126 11:11:09.770262 5917 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:09Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.923645 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/c
ni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:09Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.939386 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4
fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:09Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.952564 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:09Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.965322 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:09Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.977734 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.978042 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:09Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.978156 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.978326 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.978483 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.978642 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:09Z","lastTransitionTime":"2025-11-26T11:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.988679 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:09Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:09 crc kubenswrapper[4622]: I1126 11:11:09.999055 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:09Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.009316 4622 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.019998 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.028200 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.081807 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.081854 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.081866 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.081889 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.081902 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:10Z","lastTransitionTime":"2025-11-26T11:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.183715 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.183763 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.183772 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.183788 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.183798 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:10Z","lastTransitionTime":"2025-11-26T11:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.285521 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.285559 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.285568 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.285587 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.285597 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:10Z","lastTransitionTime":"2025-11-26T11:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.388103 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.388150 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.388160 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.388179 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.388194 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:10Z","lastTransitionTime":"2025-11-26T11:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.491797 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.491946 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.491976 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.492071 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.492109 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:10Z","lastTransitionTime":"2025-11-26T11:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.594931 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.595006 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.595018 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.595051 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.595065 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:10Z","lastTransitionTime":"2025-11-26T11:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.697650 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.697713 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.697724 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.697746 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.697760 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:10Z","lastTransitionTime":"2025-11-26T11:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.705634 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.705643 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:10 crc kubenswrapper[4622]: E1126 11:11:10.705774 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:10 crc kubenswrapper[4622]: E1126 11:11:10.705941 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.706328 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:10 crc kubenswrapper[4622]: E1126 11:11:10.706627 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.722003 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.725216 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.734993 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.746649 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.760800 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097106f2d49669b73dfa274224904a5f5f41cc3bed462f8250904376fbbbeaba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097106f2d49669b73dfa274224904a5f5f41cc3bed462f8250904376fbbbeaba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:09Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1126 11:11:09.767153 5917 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1126 11:11:09.767533 5917 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1126 11:11:09.770146 5917 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1126 11:11:09.770165 5917 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1126 11:11:09.770180 5917 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1126 11:11:09.770191 5917 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1126 11:11:09.770197 5917 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1126 11:11:09.770208 5917 factory.go:656] Stopping watch factory\\\\nI1126 11:11:09.770229 5917 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1126 11:11:09.770243 5917 handler.go:208] Removed *v1.Node event handler 2\\\\nI1126 11:11:09.770250 5917 handler.go:208] Removed *v1.Node event handler 7\\\\nI1126 11:11:09.770256 5917 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1126 11:11:09.770262 5917 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.776077 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.799843 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.800056 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.800088 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.800097 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.800115 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.800126 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:10Z","lastTransitionTime":"2025-11-26T11:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.813002 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.836133 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.854053 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.865857 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovnkube-controller/1.log" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.866343 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovnkube-controller/0.log" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.868437 4622 generic.go:334] "Generic (PLEG): container finished" podID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerID="da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c" exitCode=1 Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.868477 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerDied","Data":"da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c"} Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.868534 4622 scope.go:117] "RemoveContainer" containerID="097106f2d49669b73dfa274224904a5f5f41cc3bed462f8250904376fbbbeaba" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.869121 4622 scope.go:117] "RemoveContainer" containerID="da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c" Nov 26 11:11:10 crc kubenswrapper[4622]: E1126 11:11:10.869273 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.869277 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc4782
74c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.880073 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.888926 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.898691 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.901816 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.901846 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.901856 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.901869 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.901878 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:10Z","lastTransitionTime":"2025-11-26T11:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.907469 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.916567 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.926154 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.936724 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.946935 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.967703 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev
/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.979806 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.988698 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:10 crc kubenswrapper[4622]: I1126 11:11:10.996993 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.004939 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.005012 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.005028 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.005051 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.005075 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:11Z","lastTransitionTime":"2025-11-26T11:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.008526 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.016685 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.025990 4622 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.035058 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.045383 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.058922 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097106f2d49669b73dfa274224904a5f5f41cc3bed462f8250904376fbbbeaba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:09Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1126 11:11:09.767153 5917 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1126 11:11:09.767533 5917 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1126 11:11:09.770146 5917 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1126 11:11:09.770165 5917 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1126 11:11:09.770180 5917 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1126 11:11:09.770191 5917 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1126 11:11:09.770197 5917 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1126 11:11:09.770208 5917 factory.go:656] Stopping watch factory\\\\nI1126 11:11:09.770229 5917 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1126 11:11:09.770243 5917 handler.go:208] Removed *v1.Node event handler 2\\\\nI1126 11:11:09.770250 5917 handler.go:208] Removed *v1.Node event handler 7\\\\nI1126 11:11:09.770256 5917 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1126 11:11:09.770262 5917 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"message\\\":\\\"k controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: 
Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z]\\\\nI1126 11:11:10.575484 6042 services_controller.go:434] Service openshift-machine-api/control-plane-machine-set-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{control-plane-machine-set-operator openshift-machine-api ffd0ef27-d28d-43cc-90c8-0e8843e4c04c 4409 0 2025-02-23 05:12:21 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:control-plane-machine-set-operator] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:control-plane-machine-set-operator-tls service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077ded87 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.108814 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.108894 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.108906 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.108929 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.108947 4622 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:11Z","lastTransitionTime":"2025-11-26T11:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.212151 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.212194 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.212210 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.212232 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.212245 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:11Z","lastTransitionTime":"2025-11-26T11:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.251282 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm"] Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.251834 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.254382 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.255403 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.264170 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.268622 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/68156c18-d363-47be-990d-c722986bdfae-env-overrides\") pod \"ovnkube-control-plane-749d76644c-8g4tm\" (UID: \"68156c18-d363-47be-990d-c722986bdfae\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.268651 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/68156c18-d363-47be-990d-c722986bdfae-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-8g4tm\" (UID: \"68156c18-d363-47be-990d-c722986bdfae\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.268671 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/68156c18-d363-47be-990d-c722986bdfae-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-8g4tm\" (UID: \"68156c18-d363-47be-990d-c722986bdfae\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.268699 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqdtq\" (UniqueName: \"kubernetes.io/projected/68156c18-d363-47be-990d-c722986bdfae-kube-api-access-rqdtq\") pod \"ovnkube-control-plane-749d76644c-8g4tm\" (UID: \"68156c18-d363-47be-990d-c722986bdfae\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.276197 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.289971 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097106f2d49669b73dfa274224904a5f5f41cc3bed462f8250904376fbbbeaba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:09Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1126 11:11:09.767153 5917 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1126 11:11:09.767533 5917 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1126 11:11:09.770146 5917 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1126 11:11:09.770165 5917 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1126 11:11:09.770180 5917 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1126 11:11:09.770191 5917 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1126 11:11:09.770197 5917 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1126 11:11:09.770208 5917 factory.go:656] Stopping watch factory\\\\nI1126 11:11:09.770229 5917 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1126 11:11:09.770243 5917 handler.go:208] Removed *v1.Node event handler 2\\\\nI1126 11:11:09.770250 5917 handler.go:208] Removed *v1.Node event handler 7\\\\nI1126 11:11:09.770256 5917 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1126 11:11:09.770262 5917 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"message\\\":\\\"k controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: 
Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z]\\\\nI1126 11:11:10.575484 6042 services_controller.go:434] Service openshift-machine-api/control-plane-machine-set-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{control-plane-machine-set-operator openshift-machine-api ffd0ef27-d28d-43cc-90c8-0e8843e4c04c 4409 0 2025-02-23 05:12:21 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:control-plane-machine-set-operator] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:control-plane-machine-set-operator-tls service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077ded87 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.304742 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4
fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.314852 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.314888 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.314901 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.314919 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.314929 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:11Z","lastTransitionTime":"2025-11-26T11:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.316142 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.326058 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.335339 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.343843 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.354048 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478
274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.363946 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.369358 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/68156c18-d363-47be-990d-c722986bdfae-env-overrides\") pod \"ovnkube-control-plane-749d76644c-8g4tm\" (UID: \"68156c18-d363-47be-990d-c722986bdfae\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.369401 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/68156c18-d363-47be-990d-c722986bdfae-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-8g4tm\" (UID: \"68156c18-d363-47be-990d-c722986bdfae\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.369426 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/68156c18-d363-47be-990d-c722986bdfae-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-8g4tm\" (UID: \"68156c18-d363-47be-990d-c722986bdfae\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.369472 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-rqdtq\" (UniqueName: \"kubernetes.io/projected/68156c18-d363-47be-990d-c722986bdfae-kube-api-access-rqdtq\") pod \"ovnkube-control-plane-749d76644c-8g4tm\" (UID: \"68156c18-d363-47be-990d-c722986bdfae\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.370316 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/68156c18-d363-47be-990d-c722986bdfae-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-8g4tm\" (UID: \"68156c18-d363-47be-990d-c722986bdfae\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.370523 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/68156c18-d363-47be-990d-c722986bdfae-env-overrides\") pod \"ovnkube-control-plane-749d76644c-8g4tm\" (UID: \"68156c18-d363-47be-990d-c722986bdfae\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.373072 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.375283 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/68156c18-d363-47be-990d-c722986bdfae-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-8g4tm\" (UID: \"68156c18-d363-47be-990d-c722986bdfae\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.382889 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqdtq\" (UniqueName: \"kubernetes.io/projected/68156c18-d363-47be-990d-c722986bdfae-kube-api-access-rqdtq\") pod \"ovnkube-control-plane-749d76644c-8g4tm\" (UID: \"68156c18-d363-47be-990d-c722986bdfae\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.383644 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.393294 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.402782 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cer
t/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.411378 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.417183 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.417489 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.417518 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.417534 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.417544 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:11Z","lastTransitionTime":"2025-11-26T11:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.520866 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.520926 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.520939 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.520964 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.520977 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:11Z","lastTransitionTime":"2025-11-26T11:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.563402 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" Nov 26 11:11:11 crc kubenswrapper[4622]: W1126 11:11:11.578011 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68156c18_d363_47be_990d_c722986bdfae.slice/crio-f18a157462c7039134d16489d9b06616b99282dd2af32bc7e77e04441a014932 WatchSource:0}: Error finding container f18a157462c7039134d16489d9b06616b99282dd2af32bc7e77e04441a014932: Status 404 returned error can't find the container with id f18a157462c7039134d16489d9b06616b99282dd2af32bc7e77e04441a014932 Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.614605 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-cqclz"] Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.615250 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-cqclz" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.617619 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.619062 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.620846 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.621028 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.627871 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.627904 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.627921 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.627940 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.627951 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:11Z","lastTransitionTime":"2025-11-26T11:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.632088 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.632783 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 
11:11:11.632824 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.632835 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.632852 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.632866 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:11Z","lastTransitionTime":"2025-11-26T11:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.640956 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: E1126 11:11:11.641363 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"c
ec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.644868 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.644914 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.644926 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.644938 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.644946 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:11Z","lastTransitionTime":"2025-11-26T11:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.650408 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: E1126 11:11:11.653396 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.656604 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.656634 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.656645 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.656659 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.656669 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:11Z","lastTransitionTime":"2025-11-26T11:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.659981 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: E1126 11:11:11.664902 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.667528 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.667575 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.667585 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.667604 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.667616 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:11Z","lastTransitionTime":"2025-11-26T11:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.672175 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ba36f366-6db7-4902-90b4-6630fa4b1602-host\") pod \"node-ca-cqclz\" (UID: \"ba36f366-6db7-4902-90b4-6630fa4b1602\") " pod="openshift-image-registry/node-ca-cqclz" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.672213 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxtls\" (UniqueName: \"kubernetes.io/projected/ba36f366-6db7-4902-90b4-6630fa4b1602-kube-api-access-pxtls\") pod \"node-ca-cqclz\" (UID: \"ba36f366-6db7-4902-90b4-6630fa4b1602\") " pod="openshift-image-registry/node-ca-cqclz" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.672234 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ba36f366-6db7-4902-90b4-6630fa4b1602-serviceca\") pod \"node-ca-cqclz\" (UID: \"ba36f366-6db7-4902-90b4-6630fa4b1602\") " pod="openshift-image-registry/node-ca-cqclz" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.672122 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: E1126 11:11:11.676634 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.679671 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.679704 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.679714 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.679728 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.679738 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:11Z","lastTransitionTime":"2025-11-26T11:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.686236 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097106f2d49669b73dfa274224904a5f5f41cc3bed462f8250904376fbbbeaba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:09Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1126 11:11:09.767153 5917 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1126 11:11:09.767533 5917 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1126 11:11:09.770146 5917 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1126 11:11:09.770165 5917 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1126 11:11:09.770180 5917 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1126 11:11:09.770191 5917 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1126 11:11:09.770197 5917 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1126 11:11:09.770208 5917 factory.go:656] Stopping watch factory\\\\nI1126 11:11:09.770229 5917 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1126 11:11:09.770243 5917 handler.go:208] Removed *v1.Node event handler 2\\\\nI1126 11:11:09.770250 5917 handler.go:208] Removed *v1.Node event handler 7\\\\nI1126 11:11:09.770256 5917 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1126 11:11:09.770262 5917 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"message\\\":\\\"k controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z]\\\\nI1126 11:11:10.575484 6042 services_controller.go:434] Service openshift-machine-api/control-plane-machine-set-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{control-plane-machine-set-operator openshift-machine-api ffd0ef27-d28d-43cc-90c8-0e8843e4c04c 4409 0 2025-02-23 05:12:21 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:control-plane-machine-set-operator] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:control-plane-machine-set-operator-tls service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077ded87 \\\\u003cnil\\\\u003e}] [] 
[]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: E1126 11:11:11.689121 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: E1126 11:11:11.689245 4622 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.695493 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.708931 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be
30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensu
re-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.718470 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.728294 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.730009 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.730047 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.730058 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.730075 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.730117 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:11Z","lastTransitionTime":"2025-11-26T11:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.738922 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.749286 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.759958 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.771230 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"res
tartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] 
Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.773052 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxtls\" (UniqueName: \"kubernetes.io/projected/ba36f366-6db7-4902-90b4-6630fa4b1602-kube-api-access-pxtls\") pod \"node-ca-cqclz\" (UID: \"ba36f366-6db7-4902-90b4-6630fa4b1602\") " pod="openshift-image-registry/node-ca-cqclz" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.773099 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: 
\"kubernetes.io/configmap/ba36f366-6db7-4902-90b4-6630fa4b1602-serviceca\") pod \"node-ca-cqclz\" (UID: \"ba36f366-6db7-4902-90b4-6630fa4b1602\") " pod="openshift-image-registry/node-ca-cqclz" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.773172 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ba36f366-6db7-4902-90b4-6630fa4b1602-host\") pod \"node-ca-cqclz\" (UID: \"ba36f366-6db7-4902-90b4-6630fa4b1602\") " pod="openshift-image-registry/node-ca-cqclz" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.773237 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ba36f366-6db7-4902-90b4-6630fa4b1602-host\") pod \"node-ca-cqclz\" (UID: \"ba36f366-6db7-4902-90b4-6630fa4b1602\") " pod="openshift-image-registry/node-ca-cqclz" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.774436 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ba36f366-6db7-4902-90b4-6630fa4b1602-serviceca\") pod \"node-ca-cqclz\" (UID: \"ba36f366-6db7-4902-90b4-6630fa4b1602\") " pod="openshift-image-registry/node-ca-cqclz" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.782267 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 
11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.788278 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxtls\" (UniqueName: \"kubernetes.io/projected/ba36f366-6db7-4902-90b4-6630fa4b1602-kube-api-access-pxtls\") pod \"node-ca-cqclz\" (UID: \"ba36f366-6db7-4902-90b4-6630fa4b1602\") " pod="openshift-image-registry/node-ca-cqclz" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.790734 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.832690 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.832738 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.832748 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 
11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.832764 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.832776 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:11Z","lastTransitionTime":"2025-11-26T11:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.872748 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" event={"ID":"68156c18-d363-47be-990d-c722986bdfae","Type":"ContainerStarted","Data":"0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.872787 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" event={"ID":"68156c18-d363-47be-990d-c722986bdfae","Type":"ContainerStarted","Data":"64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.872798 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" event={"ID":"68156c18-d363-47be-990d-c722986bdfae","Type":"ContainerStarted","Data":"f18a157462c7039134d16489d9b06616b99282dd2af32bc7e77e04441a014932"} Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.875674 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovnkube-controller/1.log" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.879135 4622 scope.go:117] "RemoveContainer" containerID="da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c" Nov 26 11:11:11 crc kubenswrapper[4622]: E1126 11:11:11.879360 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.895833 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.911283 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.921741 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.927199 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-cqclz" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.930821 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\
\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.934800 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.934848 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.934859 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.934875 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.934886 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:11Z","lastTransitionTime":"2025-11-26T11:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:11 crc kubenswrapper[4622]: W1126 11:11:11.937767 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba36f366_6db7_4902_90b4_6630fa4b1602.slice/crio-83cad328bf673781d55c7050ee305c52380b1388d1859cb69cc470cc8a8bd2ae WatchSource:0}: Error finding container 83cad328bf673781d55c7050ee305c52380b1388d1859cb69cc470cc8a8bd2ae: Status 404 returned error can't find the container with id 83cad328bf673781d55c7050ee305c52380b1388d1859cb69cc470cc8a8bd2ae Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.944779 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4
fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.957733 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4
242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:11 crc kubenswrapper[4622]: I1126 11:11:11.980218 4622 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:11Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.019732 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.037320 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.037342 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.037352 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.037391 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.037402 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:12Z","lastTransitionTime":"2025-11-26T11:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.060381 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.100719 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.140654 4622 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.140693 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.140702 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.140718 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.140727 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:12Z","lastTransitionTime":"2025-11-26T11:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.141077 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.180296 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.220796 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc
32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.242675 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.242708 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.242717 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.242729 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.242736 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:12Z","lastTransitionTime":"2025-11-26T11:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.264200 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.303027 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.326320 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-z78ph"] Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.326874 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:12 crc kubenswrapper[4622]: E1126 11:11:12.326947 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.344514 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.344543 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.344553 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.344566 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.344579 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:12Z","lastTransitionTime":"2025-11-26T11:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.344917 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19
130ebed1be8fa93f5d82af2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097106f2d49669b73dfa274224904a5f5f41cc3bed462f8250904376fbbbeaba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:09Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1126 11:11:09.767153 5917 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1126 11:11:09.767533 5917 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1126 11:11:09.770146 5917 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1126 11:11:09.770165 5917 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1126 11:11:09.770180 5917 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1126 11:11:09.770191 5917 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1126 11:11:09.770197 5917 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1126 11:11:09.770208 5917 factory.go:656] Stopping watch factory\\\\nI1126 11:11:09.770229 5917 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1126 11:11:09.770243 5917 handler.go:208] Removed *v1.Node event handler 2\\\\nI1126 11:11:09.770250 5917 handler.go:208] Removed *v1.Node event handler 7\\\\nI1126 11:11:09.770256 5917 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1126 11:11:09.770262 5917 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"message\\\":\\\"k controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z]\\\\nI1126 11:11:10.575484 6042 services_controller.go:434] Service openshift-machine-api/control-plane-machine-set-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{control-plane-machine-set-operator openshift-machine-api ffd0ef27-d28d-43cc-90c8-0e8843e4c04c 4409 0 2025-02-23 05:12:21 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:control-plane-machine-set-operator] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:control-plane-machine-set-operator-tls service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 
service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077ded87 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\
\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.378494 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvfww\" (UniqueName: \"kubernetes.io/projected/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-kube-api-access-tvfww\") pod \"network-metrics-daemon-z78ph\" (UID: \"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\") " pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.378566 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs\") pod \"network-metrics-daemon-z78ph\" (UID: \"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\") " pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.382472 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.420706 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 
11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.447009 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.447064 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.447076 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.447089 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.447116 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:12Z","lastTransitionTime":"2025-11-26T11:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.461423 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.479115 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs\") pod \"network-metrics-daemon-z78ph\" (UID: \"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\") " pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.479215 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvfww\" (UniqueName: \"kubernetes.io/projected/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-kube-api-access-tvfww\") pod \"network-metrics-daemon-z78ph\" (UID: \"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\") " pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:12 crc kubenswrapper[4622]: E1126 11:11:12.479335 4622 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 26 11:11:12 crc kubenswrapper[4622]: E1126 11:11:12.479477 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs podName:f23a93f9-15cf-4dfd-802d-4b6bd04bbf81 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:12.979447583 +0000 UTC m=+32.570659105 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs") pod "network-metrics-daemon-z78ph" (UID: "f23a93f9-15cf-4dfd-802d-4b6bd04bbf81") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.508380 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19
130ebed1be8fa93f5d82af2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"message\\\":\\\"k controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z]\\\\nI1126 11:11:10.575484 6042 services_controller.go:434] Service openshift-machine-api/control-plane-machine-set-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{control-plane-machine-set-operator openshift-machine-api ffd0ef27-d28d-43cc-90c8-0e8843e4c04c 4409 0 2025-02-23 05:12:21 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:control-plane-machine-set-operator] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:control-plane-machine-set-operator-tls service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077ded87 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.528756 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvfww\" (UniqueName: \"kubernetes.io/projected/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-kube-api-access-tvfww\") pod \"network-metrics-daemon-z78ph\" (UID: \"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\") " pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.549324 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.549391 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.549404 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.549426 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.549439 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:12Z","lastTransitionTime":"2025-11-26T11:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.562598 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.603805 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.642285 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.652261 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.652302 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.652313 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.652330 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.652340 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:12Z","lastTransitionTime":"2025-11-26T11:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.687345 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.705064 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:12 crc kubenswrapper[4622]: E1126 11:11:12.705168 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.705181 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.705329 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:12 crc kubenswrapper[4622]: E1126 11:11:12.705418 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:12 crc kubenswrapper[4622]: E1126 11:11:12.705607 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.719098 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.754531 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.754664 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.754743 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.754833 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.754901 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:12Z","lastTransitionTime":"2025-11-26T11:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.764570 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.801321 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.841786 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.857566 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.857621 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.857634 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.857655 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.857671 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:12Z","lastTransitionTime":"2025-11-26T11:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.879850 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.883030 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-cqclz" event={"ID":"ba36f366-6db7-4902-90b4-6630fa4b1602","Type":"ContainerStarted","Data":"af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695"} Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.883153 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-cqclz" event={"ID":"ba36f366-6db7-4902-90b4-6630fa4b1602","Type":"ContainerStarted","Data":"83cad328bf673781d55c7050ee305c52380b1388d1859cb69cc470cc8a8bd2ae"} Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.922085 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.960672 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.960740 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.960752 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.960777 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.960796 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:12Z","lastTransitionTime":"2025-11-26T11:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.961060 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:12Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:12 crc kubenswrapper[4622]: I1126 11:11:12.984468 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs\") pod \"network-metrics-daemon-z78ph\" (UID: \"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\") " pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:12 crc kubenswrapper[4622]: E1126 11:11:12.984616 4622 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 26 11:11:12 crc kubenswrapper[4622]: E1126 11:11:12.984669 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs podName:f23a93f9-15cf-4dfd-802d-4b6bd04bbf81 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:13.984657492 +0000 UTC m=+33.575869014 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs") pod "network-metrics-daemon-z78ph" (UID: "f23a93f9-15cf-4dfd-802d-4b6bd04bbf81") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.004866 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"
quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.042055 4622 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.063455 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.063486 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.063519 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.063536 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.063546 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:13Z","lastTransitionTime":"2025-11-26T11:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.081560 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.121734 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.160632 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.166077 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.166168 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.166181 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.166276 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.166311 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:13Z","lastTransitionTime":"2025-11-26T11:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.203188 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.244497 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.268266 4622 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.268299 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.268309 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.268325 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.268334 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:13Z","lastTransitionTime":"2025-11-26T11:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.281486 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.320834 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11
-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.360826 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.371427 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.371468 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.371480 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.371527 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.371544 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:13Z","lastTransitionTime":"2025-11-26T11:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.399614 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.442240 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.474170 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.474301 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.474396 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.474481 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.474579 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:13Z","lastTransitionTime":"2025-11-26T11:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.484723 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"message\\\":\\\"k controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z]\\\\nI1126 11:11:10.575484 6042 services_controller.go:434] Service openshift-machine-api/control-plane-machine-set-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{control-plane-machine-set-operator openshift-machine-api ffd0ef27-d28d-43cc-90c8-0e8843e4c04c 4409 0 2025-02-23 05:12:21 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:control-plane-machine-set-operator] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:control-plane-machine-set-operator-tls service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077ded87 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=ovnkube-controller pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.528230 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri
-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cb
cac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.560660 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.576400 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.576429 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.576439 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.576453 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.576462 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:13Z","lastTransitionTime":"2025-11-26T11:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
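The recurring patch failure above is a straight X.509 validity-window violation: the webhook's serving certificate carries NotAfter 2025-08-24T17:21:41Z, months behind the node clock of 2025-11-26. A minimal Go sketch of the check that produces this class of error, assuming a PEM-encoded certificate at a hypothetical path:

    // certcheck.go - minimal sketch of the validity-window test behind
    // "x509: certificate has expired or is not yet valid". The file path is
    // hypothetical; point it at any PEM-encoded certificate to inspect.
    package main

    import (
    	"crypto/x509"
    	"encoding/pem"
    	"fmt"
    	"log"
    	"os"
    	"time"
    )

    func main() {
    	pemBytes, err := os.ReadFile("/tmp/webhook-serving-cert.pem") // hypothetical path
    	if err != nil {
    		log.Fatal(err)
    	}
    	block, _ := pem.Decode(pemBytes)
    	if block == nil {
    		log.Fatal("no PEM block found")
    	}
    	cert, err := x509.ParseCertificate(block.Bytes)
    	if err != nil {
    		log.Fatal(err)
    	}
    	now := time.Now()
    	switch {
    	case now.Before(cert.NotBefore):
    		fmt.Printf("not yet valid: current time %s is before %s\n",
    			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
    	case now.After(cert.NotAfter):
    		// Same shape as the log: "current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z"
    		fmt.Printf("expired: current time %s is after %s\n",
    			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
    	default:
    		fmt.Printf("valid until %s\n", cert.NotAfter.Format(time.RFC3339))
    	}
    }

Until that certificate is rotated, every status patch routed through pod.network-node-identity.openshift.io fails the same way, which is why the identical error repeats for unrelated pods below.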
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.601844 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.642940 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.679069 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.679103 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.679112 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.679127 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.679137 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:13Z","lastTransitionTime":"2025-11-26T11:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.685633 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.705524 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph"
Nov 26 11:11:13 crc kubenswrapper[4622]: E1126 11:11:13.705734 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.723207 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:13Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.752271 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.753016 4622 scope.go:117] "RemoveContainer" containerID="da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c"
Nov 26 11:11:13 crc kubenswrapper[4622]: E1126 11:11:13.753175 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.781393 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.781415 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.781426 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.781437 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.781446 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:13Z","lastTransitionTime":"2025-11-26T11:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.884329 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.884405 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.884419 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.884440 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.884455 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:13Z","lastTransitionTime":"2025-11-26T11:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.986808 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.986850 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.986859 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.986873 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.986884 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:13Z","lastTransitionTime":"2025-11-26T11:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:13 crc kubenswrapper[4622]: I1126 11:11:13.994439 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs\") pod \"network-metrics-daemon-z78ph\" (UID: \"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\") " pod="openshift-multus/network-metrics-daemon-z78ph"
Nov 26 11:11:13 crc kubenswrapper[4622]: E1126 11:11:13.994566 4622 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 26 11:11:13 crc kubenswrapper[4622]: E1126 11:11:13.994609 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs podName:f23a93f9-15cf-4dfd-802d-4b6bd04bbf81 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:15.994597115 +0000 UTC m=+35.585808637 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs") pod "network-metrics-daemon-z78ph" (UID: "f23a93f9-15cf-4dfd-802d-4b6bd04bbf81") : object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.089205 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.089241 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.089249 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.089264 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.089274 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:14Z","lastTransitionTime":"2025-11-26T11:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.191887 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.191967 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.191978 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.192007 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.192026 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:14Z","lastTransitionTime":"2025-11-26T11:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.294187 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.294232 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.294240 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.294253 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.294263 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:14Z","lastTransitionTime":"2025-11-26T11:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.396749 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.396818 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.396829 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.396854 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.396868 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:14Z","lastTransitionTime":"2025-11-26T11:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
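The nestedpendingoperations lines show the per-volume retry window widening as the same mount keeps failing (durationBeforeRetry 2s here, then 4s and 16s further down), i.e. per-operation exponential backoff. A Go sketch of that policy; the initial duration, factor, and cap below are illustrative assumptions, not kubelet's exact tuning:

    // backoff.go - sketch of the doubling retry delay visible in the
    // "No retries permitted until ... (durationBeforeRetry Ns)" entries.
    package main

    import (
    	"fmt"
    	"time"
    )

    type backoff struct {
    	duration time.Duration // wait before the next retry
    	factor   float64       // growth applied after each failure
    	cap      time.Duration // upper bound on the wait
    }

    // next returns the current wait and advances the backoff state.
    func (b *backoff) next() time.Duration {
    	d := b.duration
    	b.duration = time.Duration(float64(b.duration) * b.factor)
    	if b.duration > b.cap {
    		b.duration = b.cap
    	}
    	return d
    }

    func main() {
    	b := &backoff{duration: 500 * time.Millisecond, factor: 2, cap: 2 * time.Minute}
    	for i := 1; i <= 8; i++ {
    		fmt.Printf("failure %d: retry in %s\n", i, b.next())
    	}
    	// Walks 500ms, 1s, 2s, 4s, 8s, 16s, ... consistent with the durations
    	// scattered through the MountVolume.SetUp errors in this log.
    }

Each volume operation keeps its own backoff state, which is why metrics-certs is still at 2s while other volumes in the entries below are already waiting 16s.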
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.498259 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.498461 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.498535 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:11:30.498467908 +0000 UTC m=+50.089679440 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.498637 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.498666 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.498681 4622 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.498704 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.498734 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:30.498717238 +0000 UTC m=+50.089928770 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.498757 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.498787 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.498909 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.498945 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.498959 4622 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.498950 4622 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.498991 4622 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.499032 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:30.499013868 +0000 UTC m=+50.090225390 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.499054 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:30.499045708 +0000 UTC m=+50.090257229 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.499089 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:30.499071336 +0000 UTC m=+50.090282868 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.500056 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.500094 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.500107 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.500124 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.500135 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:14Z","lastTransitionTime":"2025-11-26T11:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.602557 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.602598 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.602610 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.602627 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.602639 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:14Z","lastTransitionTime":"2025-11-26T11:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.705058 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.705090 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.705050 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.705207 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.705262 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 26 11:11:14 crc kubenswrapper[4622]: E1126 11:11:14.705330 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.705534 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.705567 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.705576 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.705592 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.705602 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:14Z","lastTransitionTime":"2025-11-26T11:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.808992 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.809052 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.809062 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.809083 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.809102 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:14Z","lastTransitionTime":"2025-11-26T11:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
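Every kubenswrapper entry in this capture carries the klog header (severity letter fused with MMDD, wall time, PID, file:line] message). For post-processing a log like this one, a throwaway parser along these lines is handy; the regular expression is derived from the lines above and is a sketch, not an official grammar of the format:

    // klogparse.go - sketch of splitting a kubenswrapper/klog entry into its
    // header fields and message, based on the shape
    // "I1126 11:11:14.498259 4622 reconciler_common.go:218] ..." seen above.
    package main

    import (
    	"fmt"
    	"regexp"
    )

    // severity, MMDD, HH:MM:SS.ffffff, PID, file:line, message
    var klogRe = regexp.MustCompile(`^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+) ([\w./-]+:\d+)\] (.*)$`)

    func main() {
    	line := `I1126 11:11:14.498259 4622 reconciler_common.go:218] "operationExecutor.UnmountVolume started for volume ..."`
    	m := klogRe.FindStringSubmatch(line)
    	if m == nil {
    		fmt.Println("not a klog line")
    		return
    	}
    	fmt.Printf("severity=%s date=%s time=%s pid=%s src=%s\nmsg=%s\n",
    		m[1], m[2], m[3], m[4], m[5], m[6])
    }

To feed it entries from this file, strip the leading journald prefix ("Nov 26 11:11:14 crc kubenswrapper[4622]: ") first.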
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.912136 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.912208 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.912228 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.912251 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:14 crc kubenswrapper[4622]: I1126 11:11:14.912264 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:14Z","lastTransitionTime":"2025-11-26T11:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.014288 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.014341 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.014350 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.014364 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.014390 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:15Z","lastTransitionTime":"2025-11-26T11:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.116629 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.116677 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.116687 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.116701 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.116711 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:15Z","lastTransitionTime":"2025-11-26T11:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.219320 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.219353 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.219362 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.219392 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.219404 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:15Z","lastTransitionTime":"2025-11-26T11:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.322662 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.322709 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.322719 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.322733 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.322743 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:15Z","lastTransitionTime":"2025-11-26T11:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.426152 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.426200 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.426211 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.426228 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.426240 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:15Z","lastTransitionTime":"2025-11-26T11:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.528568 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.529028 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.529056 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.529091 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.529108 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:15Z","lastTransitionTime":"2025-11-26T11:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.632428 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.632486 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.632515 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.632541 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.632554 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:15Z","lastTransitionTime":"2025-11-26T11:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.705323 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph"
Nov 26 11:11:15 crc kubenswrapper[4622]: E1126 11:11:15.705533 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.735066 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.735143 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.735160 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.735190 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.735205 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:15Z","lastTransitionTime":"2025-11-26T11:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.838365 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.838434 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.838444 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.838466 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.838480 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:15Z","lastTransitionTime":"2025-11-26T11:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.941300 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.941385 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.941398 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.941422 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:15 crc kubenswrapper[4622]: I1126 11:11:15.941436 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:15Z","lastTransitionTime":"2025-11-26T11:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:16 crc kubenswrapper[4622]: E1126 11:11:16.013925 4622 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.013962 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs\") pod \"network-metrics-daemon-z78ph\" (UID: \"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\") " pod="openshift-multus/network-metrics-daemon-z78ph"
Nov 26 11:11:16 crc kubenswrapper[4622]: E1126 11:11:16.014023 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs podName:f23a93f9-15cf-4dfd-802d-4b6bd04bbf81 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:20.014000382 +0000 UTC m=+39.605211914 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs") pod "network-metrics-daemon-z78ph" (UID: "f23a93f9-15cf-4dfd-802d-4b6bd04bbf81") : object "openshift-multus"/"metrics-daemon-secret" not registered
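The condition={...} payload embedded in the setters.go lines is the node's Ready condition serialized as JSON. A sketch that rebuilds the same shape with a local stand-in struct, trimmed to the fields the log actually shows (the real type lives in the Kubernetes API; this local definition is an assumption for illustration):

    // nodecond.go - reproduces the shape of the "Node became not ready"
    // condition payload using a local stand-in for the API type.
    package main

    import (
    	"encoding/json"
    	"fmt"
    	"time"
    )

    type NodeCondition struct {
    	Type               string    `json:"type"`
    	Status             string    `json:"status"`
    	LastHeartbeatTime  time.Time `json:"lastHeartbeatTime"`
    	LastTransitionTime time.Time `json:"lastTransitionTime"`
    	Reason             string    `json:"reason"`
    	Message            string    `json:"message"`
    }

    func main() {
    	now := time.Date(2025, 11, 26, 11, 11, 15, 0, time.UTC)
    	cond := NodeCondition{
    		Type:               "Ready",
    		Status:             "False",
    		LastHeartbeatTime:  now,
    		LastTransitionTime: now,
    		Reason:             "KubeletNotReady",
    		Message: "container runtime network not ready: NetworkReady=false " +
    			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
    			"no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?",
    	}
    	out, _ := json.Marshal(cond)
    	fmt.Println(string(out)) // same field order and shape as the condition={...} payload above
    }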
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs") pod "network-metrics-daemon-z78ph" (UID: "f23a93f9-15cf-4dfd-802d-4b6bd04bbf81") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.044515 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.044563 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.044575 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.044599 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.044610 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:16Z","lastTransitionTime":"2025-11-26T11:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.146482 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.146536 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.146548 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.146563 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.146572 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:16Z","lastTransitionTime":"2025-11-26T11:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.248899 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.248936 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.248945 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.248957 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.248968 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:16Z","lastTransitionTime":"2025-11-26T11:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.351456 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.351495 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.351529 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.351546 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.351555 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:16Z","lastTransitionTime":"2025-11-26T11:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.454036 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.454073 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.454082 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.454096 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.454111 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:16Z","lastTransitionTime":"2025-11-26T11:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.555863 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.555923 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.555934 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.555954 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.555968 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:16Z","lastTransitionTime":"2025-11-26T11:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.657954 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.657988 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.657996 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.658009 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.658019 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:16Z","lastTransitionTime":"2025-11-26T11:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.705693 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.705717 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:16 crc kubenswrapper[4622]: E1126 11:11:16.705813 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.705839 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:16 crc kubenswrapper[4622]: E1126 11:11:16.705915 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:16 crc kubenswrapper[4622]: E1126 11:11:16.705977 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.760022 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.760056 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.760065 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.760075 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.760106 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:16Z","lastTransitionTime":"2025-11-26T11:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.862560 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.862588 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.862598 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.862607 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.862616 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:16Z","lastTransitionTime":"2025-11-26T11:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.965631 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.965693 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.965705 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.965727 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:16 crc kubenswrapper[4622]: I1126 11:11:16.965738 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:16Z","lastTransitionTime":"2025-11-26T11:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.068431 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.068492 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.068519 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.068543 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.068558 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:17Z","lastTransitionTime":"2025-11-26T11:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.171470 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.171522 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.171535 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.171549 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.171560 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:17Z","lastTransitionTime":"2025-11-26T11:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.273564 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.273596 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.273609 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.273620 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.273635 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:17Z","lastTransitionTime":"2025-11-26T11:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.375913 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.375961 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.375970 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.375987 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.375997 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:17Z","lastTransitionTime":"2025-11-26T11:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.478090 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.478164 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.478174 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.478186 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.478196 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:17Z","lastTransitionTime":"2025-11-26T11:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.581217 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.581256 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.581267 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.581281 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.581292 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:17Z","lastTransitionTime":"2025-11-26T11:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.683803 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.683844 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.683852 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.683863 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.683872 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:17Z","lastTransitionTime":"2025-11-26T11:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.705727 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:17 crc kubenswrapper[4622]: E1126 11:11:17.705908 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.785816 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.785841 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.785850 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.785861 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.785868 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:17Z","lastTransitionTime":"2025-11-26T11:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.887690 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.887732 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.887740 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.887753 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.887762 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:17Z","lastTransitionTime":"2025-11-26T11:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.990149 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.990195 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.990206 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.990218 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:17 crc kubenswrapper[4622]: I1126 11:11:17.990226 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:17Z","lastTransitionTime":"2025-11-26T11:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.092404 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.092447 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.092457 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.092476 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.092489 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:18Z","lastTransitionTime":"2025-11-26T11:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.193957 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.193994 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.194005 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.194029 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.194041 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:18Z","lastTransitionTime":"2025-11-26T11:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.295735 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.295779 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.295790 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.295804 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.295813 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:18Z","lastTransitionTime":"2025-11-26T11:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.397588 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.397640 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.397651 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.397681 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.397693 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:18Z","lastTransitionTime":"2025-11-26T11:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.499678 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.499727 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.499736 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.499751 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.499761 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:18Z","lastTransitionTime":"2025-11-26T11:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.601921 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.602296 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.602387 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.602473 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.602570 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:18Z","lastTransitionTime":"2025-11-26T11:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.705016 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.705024 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.705101 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.705123 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.705133 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.705148 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.705160 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:18Z","lastTransitionTime":"2025-11-26T11:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.705391 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:18 crc kubenswrapper[4622]: E1126 11:11:18.705521 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:18 crc kubenswrapper[4622]: E1126 11:11:18.705597 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:18 crc kubenswrapper[4622]: E1126 11:11:18.705978 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.806850 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.807040 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.807123 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.807200 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.807258 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:18Z","lastTransitionTime":"2025-11-26T11:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.909965 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.910000 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.910009 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.910025 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:18 crc kubenswrapper[4622]: I1126 11:11:18.910034 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:18Z","lastTransitionTime":"2025-11-26T11:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.012360 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.012419 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.012427 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.012442 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.012450 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:19Z","lastTransitionTime":"2025-11-26T11:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.114051 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.114086 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.114098 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.114111 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.114120 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:19Z","lastTransitionTime":"2025-11-26T11:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.216470 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.216518 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.216527 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.216539 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.216548 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:19Z","lastTransitionTime":"2025-11-26T11:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.318598 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.318783 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.318863 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.318929 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.318989 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:19Z","lastTransitionTime":"2025-11-26T11:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.421266 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.421292 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.421300 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.421312 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.421320 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:19Z","lastTransitionTime":"2025-11-26T11:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.523006 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.523042 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.523050 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.523062 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.523072 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:19Z","lastTransitionTime":"2025-11-26T11:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.625328 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.625367 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.625387 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.625399 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.625408 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:19Z","lastTransitionTime":"2025-11-26T11:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.704954 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:19 crc kubenswrapper[4622]: E1126 11:11:19.705077 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.727623 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.727671 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.727681 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.727700 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.727714 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:19Z","lastTransitionTime":"2025-11-26T11:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.830813 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.830880 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.830891 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.830908 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.830933 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:19Z","lastTransitionTime":"2025-11-26T11:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.932936 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.932959 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.932966 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.932978 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:19 crc kubenswrapper[4622]: I1126 11:11:19.932985 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:19Z","lastTransitionTime":"2025-11-26T11:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.035349 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.035391 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.035401 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.035412 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.035421 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:20Z","lastTransitionTime":"2025-11-26T11:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.055031 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs\") pod \"network-metrics-daemon-z78ph\" (UID: \"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\") " pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:20 crc kubenswrapper[4622]: E1126 11:11:20.055166 4622 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 26 11:11:20 crc kubenswrapper[4622]: E1126 11:11:20.055209 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs podName:f23a93f9-15cf-4dfd-802d-4b6bd04bbf81 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:28.055196275 +0000 UTC m=+47.646407796 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs") pod "network-metrics-daemon-z78ph" (UID: "f23a93f9-15cf-4dfd-802d-4b6bd04bbf81") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.138188 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.138225 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.138233 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.138245 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.138252 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:20Z","lastTransitionTime":"2025-11-26T11:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.240112 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.240148 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.240156 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.240167 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.240174 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:20Z","lastTransitionTime":"2025-11-26T11:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.341808 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.341839 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.341847 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.341858 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.341866 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:20Z","lastTransitionTime":"2025-11-26T11:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.443713 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.443773 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.443788 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.443812 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.443827 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:20Z","lastTransitionTime":"2025-11-26T11:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.545654 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.545699 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.545708 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.545721 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.545732 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:20Z","lastTransitionTime":"2025-11-26T11:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.647813 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.647861 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.647869 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.647885 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.647895 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:20Z","lastTransitionTime":"2025-11-26T11:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.706146 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.706156 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.706194 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:20 crc kubenswrapper[4622]: E1126 11:11:20.706479 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:20 crc kubenswrapper[4622]: E1126 11:11:20.706413 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:20 crc kubenswrapper[4622]: E1126 11:11:20.706564 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.717242 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.725064 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.731732 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.739261 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.748925 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.750326 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.750357 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:20 crc 
kubenswrapper[4622]: I1126 11:11:20.750365 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.750392 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.750404 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:20Z","lastTransitionTime":"2025-11-26T11:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.761222 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19
130ebed1be8fa93f5d82af2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"message\\\":\\\"k controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z]\\\\nI1126 11:11:10.575484 6042 services_controller.go:434] Service openshift-machine-api/control-plane-machine-set-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{control-plane-machine-set-operator openshift-machine-api ffd0ef27-d28d-43cc-90c8-0e8843e4c04c 4409 0 2025-02-23 05:12:21 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:control-plane-machine-set-operator] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:control-plane-machine-set-operator-tls service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077ded87 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.774389 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f0
5548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.786347 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.803746 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.814882 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.824968 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.832722 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc 
kubenswrapper[4622]: I1126 11:11:20.843097 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\
\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.853385 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.853430 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.853442 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.853464 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.853473 4622 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:20Z","lastTransitionTime":"2025-11-26T11:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.855096 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.863681 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.873052 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.881419 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:20Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.955900 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.955948 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.955957 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.955977 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:20 crc kubenswrapper[4622]: I1126 11:11:20.955989 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:20Z","lastTransitionTime":"2025-11-26T11:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.058981 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.059016 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.059026 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.059043 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.059054 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:21Z","lastTransitionTime":"2025-11-26T11:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.161171 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.161238 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.161249 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.161289 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.161300 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:21Z","lastTransitionTime":"2025-11-26T11:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.263259 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.263297 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.263305 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.263318 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.263326 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:21Z","lastTransitionTime":"2025-11-26T11:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.365247 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.365279 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.365287 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.365298 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.365307 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:21Z","lastTransitionTime":"2025-11-26T11:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.467792 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.467828 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.467855 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.467869 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.467881 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:21Z","lastTransitionTime":"2025-11-26T11:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.569848 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.569911 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.569923 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.569936 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.569946 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:21Z","lastTransitionTime":"2025-11-26T11:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.672218 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.672270 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.672278 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.672290 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.672299 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:21Z","lastTransitionTime":"2025-11-26T11:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.705892 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:21 crc kubenswrapper[4622]: E1126 11:11:21.705995 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.774309 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.774343 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.774352 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.774366 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.774388 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:21Z","lastTransitionTime":"2025-11-26T11:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.876305 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.876333 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.876342 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.876354 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.876362 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:21Z","lastTransitionTime":"2025-11-26T11:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.936401 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.936431 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.936441 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.936453 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.936460 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:21Z","lastTransitionTime":"2025-11-26T11:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:21 crc kubenswrapper[4622]: E1126 11:11:21.945838 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:21Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.948881 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.948915 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.948923 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.948937 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.948945 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:21Z","lastTransitionTime":"2025-11-26T11:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:21 crc kubenswrapper[4622]: E1126 11:11:21.956877 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:21Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.962697 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.962729 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
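The failure repeated above is a plain x509 validity-window check: the webhook's serving certificate carries a NotAfter of 2025-08-24T17:21:41Z, so every TLS handshake after that instant fails verification regardless of anything else in the chain. A minimal Go sketch of the same check (an illustration, not kubelet source; the address is the one from the log line):

```go
// Sketch: reproduce the "certificate has expired or is not yet valid"
// check against the webhook endpoint seen in the log. Assumption: the
// webhook is still listening on 127.0.0.1:9743.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // fetch the chain even though verification would fail
	})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	now := time.Now()
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		// Same condition the Go TLS stack reports in the kubelet log.
		fmt.Printf("x509: certificate invalid: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	}
}
```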
event="NodeHasNoDiskPressure" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.962739 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.962751 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.962761 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:21Z","lastTransitionTime":"2025-11-26T11:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:21 crc kubenswrapper[4622]: E1126 11:11:21.970730 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:21Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.974404 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.974495 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
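The body the kubelet is trying to send (printed string-escaped in the first full entry above) is a strategic-merge patch against the Node object: the $setElementOrder/conditions directive pins the ordering of the condition list, and the conditions array carries the updated entries, including Ready=False with the CNI message. A stripped-down sketch of that payload, with illustrative values only and the image list omitted:

```go
// Sketch: build the kind of strategic-merge patch visible in the log.
// Only the fields shown in the log are modeled; this is not the full
// node-status payload.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	patch := map[string]any{
		"status": map[string]any{
			// Directive consumed by the API server's strategic-merge logic:
			// keep the conditions in this order after the merge.
			"$setElementOrder/conditions": []map[string]string{
				{"type": "MemoryPressure"},
				{"type": "DiskPressure"},
				{"type": "PIDPressure"},
				{"type": "Ready"},
			},
			// Only changed condition entries need to be listed.
			"conditions": []map[string]string{{
				"type":    "Ready",
				"status":  "False",
				"reason":  "KubeletNotReady",
				"message": "container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady",
			}},
		},
	}
	b, err := json.Marshal(patch)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // the same JSON shape appears, escaped, in the log payload
}
```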
event="NodeHasNoDiskPressure" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.974537 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.974570 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.974587 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:21Z","lastTransitionTime":"2025-11-26T11:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:21 crc kubenswrapper[4622]: E1126 11:11:21.984653 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:21Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.987215 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.987241 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
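Each "Error updating node status, will retry" line is one pass of the kubelet's bounded retry loop; upstream kubelet caps this at nodeStatusUpdateRetry (5) attempts before giving up with "update node status exceeds retry count", which appears just below. A schematic sketch of that control flow, where tryUpdateNodeStatus is a stand-in for the real kubelet method, not its implementation:

```go
// Sketch: the bounded-retry pattern behind the "will retry" /
// "exceeds retry count" pair in this log.
package main

import (
	"errors"
	"fmt"
)

// Upstream kubelet uses nodeStatusUpdateRetry = 5.
const nodeStatusUpdateRetry = 5

// Stand-in: every attempt fails the way the webhook call fails here.
func tryUpdateNodeStatus() error {
	return errors.New(`failed calling webhook "node.network-node-identity.openshift.io": certificate has expired`)
}

func main() {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		err := tryUpdateNodeStatus()
		if err == nil {
			return // status patched; nothing more to do
		}
		fmt.Println("Error updating node status, will retry:", err)
	}
	// All attempts exhausted, matching kubelet_node_status.go:572 below.
	fmt.Println("Unable to update node status: update node status exceeds retry count")
}
```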
event="NodeHasNoDiskPressure" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.987250 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.987262 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.987272 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:21Z","lastTransitionTime":"2025-11-26T11:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:21 crc kubenswrapper[4622]: E1126 11:11:21.995325 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:21Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:21 crc kubenswrapper[4622]: E1126 11:11:21.995446 4622 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.996457 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.996487 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.996514 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.996526 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:21 crc kubenswrapper[4622]: I1126 11:11:21.996535 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:21Z","lastTransitionTime":"2025-11-26T11:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.098081 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.098117 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.098127 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.098143 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.098153 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:22Z","lastTransitionTime":"2025-11-26T11:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.200577 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.200621 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.200629 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.200643 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.200653 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:22Z","lastTransitionTime":"2025-11-26T11:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.303012 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.303064 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.303073 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.303086 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.303098 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:22Z","lastTransitionTime":"2025-11-26T11:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.405181 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.405225 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.405235 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.405250 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.405260 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:22Z","lastTransitionTime":"2025-11-26T11:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.507382 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.507428 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.507440 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.507458 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.507471 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:22Z","lastTransitionTime":"2025-11-26T11:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.609474 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.609523 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.609532 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.609544 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.609551 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:22Z","lastTransitionTime":"2025-11-26T11:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.705885 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:22 crc kubenswrapper[4622]: E1126 11:11:22.705981 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.706033 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:22 crc kubenswrapper[4622]: E1126 11:11:22.706104 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.705888 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:22 crc kubenswrapper[4622]: E1126 11:11:22.706164 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.710978 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.711006 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.711015 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.711025 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.711033 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:22Z","lastTransitionTime":"2025-11-26T11:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.813116 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.813146 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.813156 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.813168 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.813178 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:22Z","lastTransitionTime":"2025-11-26T11:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.915207 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.915232 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.915240 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.915249 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:22 crc kubenswrapper[4622]: I1126 11:11:22.915259 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:22Z","lastTransitionTime":"2025-11-26T11:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.017178 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.017220 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.017228 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.017244 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.017255 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:23Z","lastTransitionTime":"2025-11-26T11:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.118993 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.119022 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.119030 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.119041 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.119050 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:23Z","lastTransitionTime":"2025-11-26T11:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.220471 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.220540 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.220551 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.220563 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.220572 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:23Z","lastTransitionTime":"2025-11-26T11:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.322257 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.322289 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.322297 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.322307 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.322314 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:23Z","lastTransitionTime":"2025-11-26T11:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.424327 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.424363 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.424383 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.424399 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.424409 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:23Z","lastTransitionTime":"2025-11-26T11:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.526725 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.526762 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.526776 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.526792 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.526802 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:23Z","lastTransitionTime":"2025-11-26T11:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.629258 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.629289 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.629296 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.629308 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.629316 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:23Z","lastTransitionTime":"2025-11-26T11:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.705181 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:23 crc kubenswrapper[4622]: E1126 11:11:23.705281 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.731305 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.731332 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.731341 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.731353 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.731360 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:23Z","lastTransitionTime":"2025-11-26T11:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.832965 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.832999 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.833007 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.833021 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.833030 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:23Z","lastTransitionTime":"2025-11-26T11:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.935403 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.935434 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.935443 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.935454 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:23 crc kubenswrapper[4622]: I1126 11:11:23.935461 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:23Z","lastTransitionTime":"2025-11-26T11:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.037325 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.037357 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.037365 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.037388 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.037396 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:24Z","lastTransitionTime":"2025-11-26T11:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.139087 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.139138 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.139146 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.139159 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.139166 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:24Z","lastTransitionTime":"2025-11-26T11:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.241311 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.241339 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.241364 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.241385 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.241393 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:24Z","lastTransitionTime":"2025-11-26T11:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.342944 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.342977 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.342986 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.342998 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.343008 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:24Z","lastTransitionTime":"2025-11-26T11:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.444987 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.445019 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.445027 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.445041 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.445050 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:24Z","lastTransitionTime":"2025-11-26T11:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.547271 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.547494 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.547607 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.547687 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.547745 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:24Z","lastTransitionTime":"2025-11-26T11:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.649994 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.650020 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.650030 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.650042 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.650050 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:24Z","lastTransitionTime":"2025-11-26T11:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.705267 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:24 crc kubenswrapper[4622]: E1126 11:11:24.705405 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.705451 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:24 crc kubenswrapper[4622]: E1126 11:11:24.705583 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.705454 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:24 crc kubenswrapper[4622]: E1126 11:11:24.705938 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.706241 4622 scope.go:117] "RemoveContainer" containerID="da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.751942 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.752083 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.752096 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.752109 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.752119 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:24Z","lastTransitionTime":"2025-11-26T11:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.853489 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.853545 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.853554 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.853568 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.853576 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:24Z","lastTransitionTime":"2025-11-26T11:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.917491 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovnkube-controller/1.log" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.919267 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerStarted","Data":"b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a"} Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.919593 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.929719 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:24Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.941977 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:24Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:24 crc 
kubenswrapper[4622]: I1126 11:11:24.955232 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.955264 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.955272 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.955285 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.955296 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:24Z","lastTransitionTime":"2025-11-26T11:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.956058 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54
b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\
\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:24Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.968110 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:24Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:24 crc kubenswrapper[4622]: I1126 11:11:24.985775 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:24Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.000513 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:24Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.012978 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.020990 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.030140 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\"
:\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 
11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 
11:11:25.037822 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.044371 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.052094 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.057634 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.057665 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.057675 4622 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.057689 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.057697 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:25Z","lastTransitionTime":"2025-11-26T11:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.059301 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-conf
ig\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.065879 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.073351 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.082345 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.095210 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"message\\\":\\\"k controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z]\\\\nI1126 11:11:10.575484 6042 services_controller.go:434] Service openshift-machine-api/control-plane-machine-set-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{control-plane-machine-set-operator openshift-machine-api ffd0ef27-d28d-43cc-90c8-0e8843e4c04c 4409 0 2025-02-23 05:12:21 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:control-plane-machine-set-operator] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:control-plane-machine-set-operator-tls service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077ded87 \\\\u003cnil\\\\u003e}] [] 
[]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\
\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.159047 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.159076 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.159084 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.159095 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.159104 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:25Z","lastTransitionTime":"2025-11-26T11:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.261153 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.261187 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.261195 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.261208 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.261216 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:25Z","lastTransitionTime":"2025-11-26T11:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.363525 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.363555 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.363563 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.363575 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.363584 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:25Z","lastTransitionTime":"2025-11-26T11:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.465011 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.465053 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.465061 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.465073 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.465083 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:25Z","lastTransitionTime":"2025-11-26T11:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.567003 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.567032 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.567040 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.567052 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.567061 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:25Z","lastTransitionTime":"2025-11-26T11:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.668336 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.668365 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.668373 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.668392 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.668400 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:25Z","lastTransitionTime":"2025-11-26T11:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.705151 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:25 crc kubenswrapper[4622]: E1126 11:11:25.705231 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.770276 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.770301 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.770309 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.770318 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.770326 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:25Z","lastTransitionTime":"2025-11-26T11:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.872197 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.872227 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.872235 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.872247 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.872256 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:25Z","lastTransitionTime":"2025-11-26T11:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.922492 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovnkube-controller/2.log" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.922964 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovnkube-controller/1.log" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.925171 4622 generic.go:334] "Generic (PLEG): container finished" podID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerID="b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a" exitCode=1 Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.925201 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerDied","Data":"b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a"} Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.925227 4622 scope.go:117] "RemoveContainer" containerID="da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.925772 4622 scope.go:117] "RemoveContainer" containerID="b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a" Nov 26 11:11:25 crc kubenswrapper[4622]: E1126 11:11:25.925903 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.940055 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4
fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.949107 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.957776 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.965602 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.974045 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.974074 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.974083 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.974094 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.974102 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:25Z","lastTransitionTime":"2025-11-26T11:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.975915 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.983015 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.991472 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:25 crc kubenswrapper[4622]: I1126 11:11:25.998842 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:25Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.005716 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.017119 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.025620 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.034176 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.041748 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 
11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.048566 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.056248 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.065534 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.076309 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.076344 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:26 crc 
kubenswrapper[4622]: I1126 11:11:26.076353 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.076368 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.076388 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:26Z","lastTransitionTime":"2025-11-26T11:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.078582 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf2671934
3c126aaba2a1321f336f412a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da4f845429b55667406e0db926955369529c6d19130ebed1be8fa93f5d82af2c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"message\\\":\\\"k controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:10Z is after 2025-08-24T17:21:41Z]\\\\nI1126 11:11:10.575484 6042 services_controller.go:434] Service openshift-machine-api/control-plane-machine-set-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{control-plane-machine-set-operator openshift-machine-api ffd0ef27-d28d-43cc-90c8-0e8843e4c04c 4409 0 2025-02-23 05:12:21 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:control-plane-machine-set-operator] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:control-plane-machine-set-operator-tls service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077ded87 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:25Z\\\",\\\"message\\\":\\\"rIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1126 11:11:25.340927 6303 ovnkube.go:599] Stopped ovnkube\\\\nI1126 11:11:25.340871 6303 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1126 11:11:25.341022 6303 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1126 11:11:25.341055 6303 services_controller.go:443] Built service openshift-config-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.161\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1126 11:11:25.341090 6303 services_controller.go:444] Built service 
openshift-config-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1126 11:11:25.341110 6303 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\
\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.178703 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.178743 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.178753 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.178768 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.178778 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:26Z","lastTransitionTime":"2025-11-26T11:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.280690 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.280739 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.280750 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.280765 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.280775 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:26Z","lastTransitionTime":"2025-11-26T11:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.382366 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.382422 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.382431 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.382447 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.382457 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:26Z","lastTransitionTime":"2025-11-26T11:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.484933 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.484961 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.484969 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.484982 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.484990 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:26Z","lastTransitionTime":"2025-11-26T11:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.587008 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.587049 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.587057 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.587223 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.587232 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:26Z","lastTransitionTime":"2025-11-26T11:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.689565 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.689609 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.689617 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.689630 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.689638 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:26Z","lastTransitionTime":"2025-11-26T11:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.704993 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:26 crc kubenswrapper[4622]: E1126 11:11:26.705066 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.704994 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:26 crc kubenswrapper[4622]: E1126 11:11:26.705141 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.705167 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:26 crc kubenswrapper[4622]: E1126 11:11:26.705279 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.791764 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.791798 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.791807 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.791818 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.791825 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:26Z","lastTransitionTime":"2025-11-26T11:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.893623 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.893658 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.893667 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.893679 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.893688 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:26Z","lastTransitionTime":"2025-11-26T11:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.929465 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovnkube-controller/2.log" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.932089 4622 scope.go:117] "RemoveContainer" containerID="b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a" Nov 26 11:11:26 crc kubenswrapper[4622]: E1126 11:11:26.932211 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.940874 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.947774 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.955858 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"res
tartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] 
Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.963089 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.969300 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.976629 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.983394 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 
11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.989478 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z"
Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.995244 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.995270 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.995278 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.995293 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.995301 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:26Z","lastTransitionTime":"2025-11-26T11:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:26 crc kubenswrapper[4622]: I1126 11:11:26.997494 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:26Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.006134 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:27Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.017787 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf2671934
3c126aaba2a1321f336f412a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:25Z\\\",\\\"message\\\":\\\"rIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1126 11:11:25.340927 6303 ovnkube.go:599] Stopped ovnkube\\\\nI1126 11:11:25.340871 6303 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1126 11:11:25.341022 6303 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1126 11:11:25.341055 6303 services_controller.go:443] Built service openshift-config-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.161\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1126 11:11:25.341090 6303 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1126 11:11:25.341110 6303 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:27Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.025680 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:27Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.031989 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:27Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.044053 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4
fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:27Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.051522 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:27Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.060075 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:27Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.067228 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:27Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.096824 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.096863 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.096873 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.096885 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.096893 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:27Z","lastTransitionTime":"2025-11-26T11:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.199075 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.199105 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.199113 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.199125 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.199141 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:27Z","lastTransitionTime":"2025-11-26T11:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
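The recurring rejection above has a single cause: the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-11-26T11:11:27Z, so every status patch fails TLS verification. A minimal Go sketch for inspecting the certificate such an endpoint presents (the address is taken from the log; InsecureSkipVerify is used only so the handshake completes and the expired certificate can be read):

// certcheck.go - print the validity window of the certificate served at a
// TLS endpoint. Diagnostic sketch only; skipping verification is acceptable
// here because the goal is to inspect an already-rejected (expired) cert.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	addr := "127.0.0.1:9743" // webhook endpoint taken from the kubelet log
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%s\n  NotBefore=%s\n  NotAfter=%s\n  expired=%v\n",
			cert.Subject, cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339), now.After(cert.NotAfter))
	}
}

Once the certificate is rotated (or the cluster's certificate recovery runs), these status patches should start succeeding; until then the kubelet keeps retrying them.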
Has your network provider started?"} Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.301061 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.301083 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.301091 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.301102 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.301127 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:27Z","lastTransitionTime":"2025-11-26T11:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.402800 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.402827 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.402835 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.402862 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.402870 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:27Z","lastTransitionTime":"2025-11-26T11:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.504519 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.504547 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.504555 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.504569 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.504577 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:27Z","lastTransitionTime":"2025-11-26T11:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.606315 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.606339 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.606347 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.606358 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.606365 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:27Z","lastTransitionTime":"2025-11-26T11:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.705913 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:27 crc kubenswrapper[4622]: E1126 11:11:27.705998 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.707985 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.708035 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.708044 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.708057 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.708066 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:27Z","lastTransitionTime":"2025-11-26T11:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
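The NodeNotReady flood repeats because the CRI runtime reports NetworkReady=false until a CNI configuration appears in /etc/kubernetes/cni/net.d/ (ovn-kubernetes/multus have not written one yet at this point in the boot). A rough Go approximation of that readiness check, assuming the directory from the log and the conventional .conf/.conflist/.json extensions that libcni accepts:

// cnicheck.go - report whether a CNI network configuration exists, roughly
// mirroring what a CRI runtime checks before declaring NetworkReady=true.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // path taken from the kubelet log
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	var found []string
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions libcni loads
			found = append(found, e.Name())
		}
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file found; network plugin not ready")
		return
	}
	fmt.Println("CNI configs:", found)
}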
Has your network provider started?"} Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.809403 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.809434 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.809444 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.809455 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.809463 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:27Z","lastTransitionTime":"2025-11-26T11:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.912728 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.912756 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.912765 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.912774 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:27 crc kubenswrapper[4622]: I1126 11:11:27.912781 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:27Z","lastTransitionTime":"2025-11-26T11:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.015445 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.015469 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.015477 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.015486 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.015494 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:28Z","lastTransitionTime":"2025-11-26T11:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.116924 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.116950 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.116960 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.116969 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.116976 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:28Z","lastTransitionTime":"2025-11-26T11:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.120387 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs\") pod \"network-metrics-daemon-z78ph\" (UID: \"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\") " pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:28 crc kubenswrapper[4622]: E1126 11:11:28.120534 4622 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 26 11:11:28 crc kubenswrapper[4622]: E1126 11:11:28.120584 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs podName:f23a93f9-15cf-4dfd-802d-4b6bd04bbf81 nodeName:}" failed. No retries permitted until 2025-11-26 11:11:44.120572239 +0000 UTC m=+63.711783761 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs") pod "network-metrics-daemon-z78ph" (UID: "f23a93f9-15cf-4dfd-802d-4b6bd04bbf81") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.218357 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.218393 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.218401 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.218411 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.218419 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:28Z","lastTransitionTime":"2025-11-26T11:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.319767 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.319816 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.319824 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.319836 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.319844 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:28Z","lastTransitionTime":"2025-11-26T11:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
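The "not registered" failure above is a kubelet-internal condition, not necessarily a missing API object: after the restart, the kubelet's secret manager has not yet (re)registered pod network-metrics-daemon-z78ph, so volume setup is retried with growing backoff (16s on this attempt). A client-go sketch to confirm the secret itself exists on the API side; the kubeconfig path is an assumption for illustration:

// secretcheck.go - verify that the secret referenced by the failing volume
// mount exists in the API server. Uses client-go; the kubeconfig path below
// is a placeholder for whatever admin credentials are available on the node.
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
	if err != nil {
		fmt.Println("load kubeconfig:", err)
		return
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		fmt.Println("build clientset:", err)
		return
	}
	s, err := cs.CoreV1().Secrets("openshift-multus").
		Get(context.TODO(), "metrics-daemon-secret", metav1.GetOptions{})
	if err != nil {
		fmt.Println("secret lookup failed:", err) // NotFound vs. transient error
		return
	}
	fmt.Printf("secret %s/%s exists with %d keys\n", s.Namespace, s.Name, len(s.Data))
}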
Has your network provider started?"} Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.421456 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.421487 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.421537 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.421549 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.421557 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:28Z","lastTransitionTime":"2025-11-26T11:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.523025 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.523077 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.523085 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.523095 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.523125 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:28Z","lastTransitionTime":"2025-11-26T11:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.624908 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.624936 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.624944 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.624955 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.624966 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:28Z","lastTransitionTime":"2025-11-26T11:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.705405 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:28 crc kubenswrapper[4622]: E1126 11:11:28.705546 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.705696 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.705773 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:28 crc kubenswrapper[4622]: E1126 11:11:28.705894 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:28 crc kubenswrapper[4622]: E1126 11:11:28.705952 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.726733 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.726762 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.726770 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.726782 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.726792 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:28Z","lastTransitionTime":"2025-11-26T11:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
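The "No sandbox for pod can be found" / "Error syncing pod, skipping" pairs show the gate the kubelet applies while the runtime network is down: pods that need a new sandbox on the cluster network are skipped, while host-network static pods (etcd, kube-scheduler, kube-controller-manager) keep running, which is why their containers are already Running earlier in the log. A simplified sketch of that gate (not the real pod-workers code):

// syncgate.go - sketch of the check applied before (re)creating a pod
// sandbox: pods that need the cluster network are skipped while the runtime
// still reports the network as not ready; host-network pods are exempt.
package main

import (
	"errors"
	"fmt"
)

type pod struct {
	name        string
	hostNetwork bool
}

// networkReady would come from the CRI runtime status in the real kubelet.
func canStartSandbox(p pod, networkReady bool) error {
	if p.hostNetwork || networkReady {
		return nil
	}
	return errors.New("network is not ready: container runtime network not ready")
}

func main() {
	for _, p := range []pod{
		{"openshift-etcd/etcd-crc", true},
		{"openshift-multus/network-metrics-daemon-z78ph", false},
	} {
		if err := canStartSandbox(p, false); err != nil {
			fmt.Printf("skip %s: %v\n", p.name, err)
			continue
		}
		fmt.Printf("start sandbox for %s\n", p.name)
	}
}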
Has your network provider started?"} Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.828731 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.828766 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.828774 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.828787 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.828795 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:28Z","lastTransitionTime":"2025-11-26T11:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.930806 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.930843 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.930853 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.930866 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:28 crc kubenswrapper[4622]: I1126 11:11:28.930874 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:28Z","lastTransitionTime":"2025-11-26T11:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.033292 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.033332 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.033343 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.033357 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.033367 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:29Z","lastTransitionTime":"2025-11-26T11:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.135172 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.135212 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.135221 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.135237 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.135245 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:29Z","lastTransitionTime":"2025-11-26T11:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.237143 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.237183 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.237192 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.237203 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.237212 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:29Z","lastTransitionTime":"2025-11-26T11:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.339089 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.339120 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.339128 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.339140 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.339149 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:29Z","lastTransitionTime":"2025-11-26T11:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
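Each five-line burst of node events above corresponds to one node-status sync in which the kubelet recomputes its conditions and again records Ready=False; the bursts arrive roughly every 100 ms because the status updates themselves keep failing and are retried. The condition being written, expressed with the real API types:

// nodecond.go - construct the Ready=False condition carried by the repeated
// setters.go log lines, using the upstream Kubernetes API types.
package main

import (
	"encoding/json"
	"fmt"

	v1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	now := metav1.Now()
	cond := v1.NodeCondition{
		Type:               v1.NodeReady,
		Status:             v1.ConditionFalse,
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. " +
			"Has your network provider started?",
	}
	b, _ := json.Marshal(cond)
	fmt.Println(string(b))
}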
Has your network provider started?"} Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.441395 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.441428 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.441436 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.441450 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.441458 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:29Z","lastTransitionTime":"2025-11-26T11:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.543770 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.544000 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.544009 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.544024 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.544033 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:29Z","lastTransitionTime":"2025-11-26T11:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.645976 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.646009 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.646018 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.646031 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.646040 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:29Z","lastTransitionTime":"2025-11-26T11:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.705569 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:29 crc kubenswrapper[4622]: E1126 11:11:29.705666 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.747532 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.747565 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.747574 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.747586 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.747595 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:29Z","lastTransitionTime":"2025-11-26T11:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.849747 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.849801 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.849810 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.849823 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.849834 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:29Z","lastTransitionTime":"2025-11-26T11:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.950518 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.952157 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.952296 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.952400 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.952464 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.952754 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:29Z","lastTransitionTime":"2025-11-26T11:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.959634 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.962602 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:29Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.970084 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:29Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.977809 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:29Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.984280 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:29Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:29 crc kubenswrapper[4622]: I1126 11:11:29.996557 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4
fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:29Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.004303 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.011308 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.017480 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.024667 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.031364 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.039307 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\"
:\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 
11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 
11:11:30.045333 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.053057 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.055524 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.055560 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.055570 4622 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.055582 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.055590 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:30Z","lastTransitionTime":"2025-11-26T11:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.059622 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-conf
ig\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.068062 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\
\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerI
D\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\
\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.079644 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf2671934
3c126aaba2a1321f336f412a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:25Z\\\",\\\"message\\\":\\\"rIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1126 11:11:25.340927 6303 ovnkube.go:599] Stopped ovnkube\\\\nI1126 11:11:25.340871 6303 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1126 11:11:25.341022 6303 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1126 11:11:25.341055 6303 services_controller.go:443] Built service openshift-config-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.161\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1126 11:11:25.341090 6303 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1126 11:11:25.341110 6303 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.086986 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.156881 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.156938 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.156950 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.156962 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.156971 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:30Z","lastTransitionTime":"2025-11-26T11:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.258573 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.258594 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.258602 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.258613 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.258620 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:30Z","lastTransitionTime":"2025-11-26T11:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.360336 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.360364 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.360372 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.360399 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.360406 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:30Z","lastTransitionTime":"2025-11-26T11:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.462299 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.462336 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.462345 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.462354 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.462362 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:30Z","lastTransitionTime":"2025-11-26T11:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.537703 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.537765 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.537789 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.537807 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.537839 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:12:02.537815243 +0000 UTC m=+82.129026765 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.537892 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.537853 4622 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.537923 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.537935 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.537943 4622 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.537965 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-26 11:12:02.537949376 +0000 UTC m=+82.129160908 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.537983 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-26 11:12:02.537975656 +0000 UTC m=+82.129187188 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.537979 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.538008 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.538019 4622 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.538043 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-26 11:12:02.538036911 +0000 UTC m=+82.129248443 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.538044 4622 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.538080 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-26 11:12:02.538074101 +0000 UTC m=+82.129285624 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.563936 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.564048 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.564133 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.564217 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.564272 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:30Z","lastTransitionTime":"2025-11-26T11:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.666192 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.666221 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.666229 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.666241 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.666252 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:30Z","lastTransitionTime":"2025-11-26T11:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.705614 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.705776 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.705703 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.706030 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.706093 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:30 crc kubenswrapper[4622]: E1126 11:11:30.706162 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.714994 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.724148 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.735600 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf2671934
3c126aaba2a1321f336f412a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:25Z\\\",\\\"message\\\":\\\"rIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1126 11:11:25.340927 6303 ovnkube.go:599] Stopped ovnkube\\\\nI1126 11:11:25.340871 6303 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1126 11:11:25.341022 6303 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1126 11:11:25.341055 6303 services_controller.go:443] Built service openshift-config-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.161\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1126 11:11:25.341090 6303 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1126 11:11:25.341110 6303 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.749600 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f0
5548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.757346 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.765830 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.768093 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.768130 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.768139 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.768170 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.768178 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:30Z","lastTransitionTime":"2025-11-26T11:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.776470 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.784852 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.791866 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.799004 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a50c801-f165-4548-b4b3-f4218c4bf866\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcfd0d3c7123cf20a72a99f6eb171bfde6b6930b0725010d05dd6c03c9838b7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d72500572c691e375fc8eae060f7f2296fcae5f47b4ada9e1a6a6cb1e2eb417a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aa986cd95cb36fa13517fe871cfb3d701525d13c901e2cd0349b2e67b3a28d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.807490 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.815282 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.822252 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.832096 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.840568 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.848799 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.856426 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 
11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.865468 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:30Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.869753 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.869797 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.869808 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.869823 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.869832 4622 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:30Z","lastTransitionTime":"2025-11-26T11:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.971132 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.971184 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.971193 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.971207 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:30 crc kubenswrapper[4622]: I1126 11:11:30.971215 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:30Z","lastTransitionTime":"2025-11-26T11:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.072930 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.072977 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.072988 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.073004 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.073014 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:31Z","lastTransitionTime":"2025-11-26T11:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.175360 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.175396 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.175406 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.175419 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.175427 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:31Z","lastTransitionTime":"2025-11-26T11:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.277222 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.277264 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.277272 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.277288 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.277296 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:31Z","lastTransitionTime":"2025-11-26T11:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.379042 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.379074 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.379084 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.379096 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.379103 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:31Z","lastTransitionTime":"2025-11-26T11:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.480884 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.480919 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.480928 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.480941 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.480950 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:31Z","lastTransitionTime":"2025-11-26T11:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.582665 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.582719 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.582728 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.582740 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.582748 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:31Z","lastTransitionTime":"2025-11-26T11:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.684595 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.684628 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.684637 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.684649 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.684657 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:31Z","lastTransitionTime":"2025-11-26T11:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.705289 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:31 crc kubenswrapper[4622]: E1126 11:11:31.705613 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.786594 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.786629 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.786639 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.786651 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.786659 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:31Z","lastTransitionTime":"2025-11-26T11:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.888311 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.888337 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.888345 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.888354 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.888362 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:31Z","lastTransitionTime":"2025-11-26T11:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.989804 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.989836 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.989846 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.989859 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:31 crc kubenswrapper[4622]: I1126 11:11:31.989868 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:31Z","lastTransitionTime":"2025-11-26T11:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.091489 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.091535 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.091545 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.091558 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.091567 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:32Z","lastTransitionTime":"2025-11-26T11:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.193658 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.193688 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.193696 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.193707 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.193714 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:32Z","lastTransitionTime":"2025-11-26T11:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.253005 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.253040 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.253049 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.253061 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.253068 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:32Z","lastTransitionTime":"2025-11-26T11:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:32 crc kubenswrapper[4622]: E1126 11:11:32.261910 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:32Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.264775 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.264811 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.264820 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.264834 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.264842 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:32Z","lastTransitionTime":"2025-11-26T11:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:32 crc kubenswrapper[4622]: E1126 11:11:32.273178 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:32Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.275350 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.275375 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.275401 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.275414 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.275423 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:32Z","lastTransitionTime":"2025-11-26T11:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:32 crc kubenswrapper[4622]: E1126 11:11:32.282693 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:32Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.284699 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.284741 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.284751 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.284762 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.284771 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:32Z","lastTransitionTime":"2025-11-26T11:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:32 crc kubenswrapper[4622]: E1126 11:11:32.292411 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:32Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.294376 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.294423 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.294431 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.294442 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.294450 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:32Z","lastTransitionTime":"2025-11-26T11:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:32 crc kubenswrapper[4622]: E1126 11:11:32.304348 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:32Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:32 crc kubenswrapper[4622]: E1126 11:11:32.304489 4622 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.305625 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.305657 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.305666 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.305676 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.305683 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:32Z","lastTransitionTime":"2025-11-26T11:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.407031 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.407065 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.407074 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.407106 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.407115 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:32Z","lastTransitionTime":"2025-11-26T11:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.509674 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.509709 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.509718 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.509732 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.509741 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:32Z","lastTransitionTime":"2025-11-26T11:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.611127 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.611150 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.611158 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.611170 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.611177 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:32Z","lastTransitionTime":"2025-11-26T11:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.705077 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.705097 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:32 crc kubenswrapper[4622]: E1126 11:11:32.705168 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.705240 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:32 crc kubenswrapper[4622]: E1126 11:11:32.705334 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:32 crc kubenswrapper[4622]: E1126 11:11:32.705464 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.712329 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.712354 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.712362 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.712375 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.712394 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:32Z","lastTransitionTime":"2025-11-26T11:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.814155 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.814321 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.814329 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.814339 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.814347 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:32Z","lastTransitionTime":"2025-11-26T11:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.915819 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.915851 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.915859 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.915872 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:32 crc kubenswrapper[4622]: I1126 11:11:32.915881 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:32Z","lastTransitionTime":"2025-11-26T11:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.018075 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.018103 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.018110 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.018125 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.018133 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:33Z","lastTransitionTime":"2025-11-26T11:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.119696 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.119719 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.119727 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.119738 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.119747 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:33Z","lastTransitionTime":"2025-11-26T11:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.221011 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.221038 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.221047 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.221057 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.221065 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:33Z","lastTransitionTime":"2025-11-26T11:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.322650 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.322684 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.322692 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.322702 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.322710 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:33Z","lastTransitionTime":"2025-11-26T11:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.424011 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.424035 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.424043 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.424054 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.424062 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:33Z","lastTransitionTime":"2025-11-26T11:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.526011 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.526038 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.526046 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.526056 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.526064 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:33Z","lastTransitionTime":"2025-11-26T11:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.627368 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.627411 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.627421 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.627431 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.627438 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:33Z","lastTransitionTime":"2025-11-26T11:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.705689 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:33 crc kubenswrapper[4622]: E1126 11:11:33.705788 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.728803 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.728837 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.728846 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.728860 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.728868 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:33Z","lastTransitionTime":"2025-11-26T11:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.830459 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.830492 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.830520 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.830532 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.830540 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:33Z","lastTransitionTime":"2025-11-26T11:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.931960 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.931990 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.931997 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.932009 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:33 crc kubenswrapper[4622]: I1126 11:11:33.932018 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:33Z","lastTransitionTime":"2025-11-26T11:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.034119 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.034220 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.034231 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.034243 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.034253 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:34Z","lastTransitionTime":"2025-11-26T11:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.136161 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.136189 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.136197 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.136207 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.136214 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:34Z","lastTransitionTime":"2025-11-26T11:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.237882 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.237904 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.237913 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.237921 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.237928 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:34Z","lastTransitionTime":"2025-11-26T11:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.341642 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.341669 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.341677 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.341709 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.341718 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:34Z","lastTransitionTime":"2025-11-26T11:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.443180 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.443207 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.443215 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.443227 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.443235 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:34Z","lastTransitionTime":"2025-11-26T11:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.545093 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.545115 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.545123 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.545133 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.545140 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:34Z","lastTransitionTime":"2025-11-26T11:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.646499 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.646536 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.646545 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.646556 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.646563 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:34Z","lastTransitionTime":"2025-11-26T11:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.705437 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.705487 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:34 crc kubenswrapper[4622]: E1126 11:11:34.705537 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:34 crc kubenswrapper[4622]: E1126 11:11:34.705585 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.705621 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:34 crc kubenswrapper[4622]: E1126 11:11:34.705691 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.748320 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.748347 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.748354 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.748363 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.748371 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:34Z","lastTransitionTime":"2025-11-26T11:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.850855 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.850890 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.850900 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.850912 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.850922 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:34Z","lastTransitionTime":"2025-11-26T11:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.952764 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.952813 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.952824 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.952837 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:34 crc kubenswrapper[4622]: I1126 11:11:34.952845 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:34Z","lastTransitionTime":"2025-11-26T11:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.056983 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.057027 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.057037 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.057047 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.057055 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:35Z","lastTransitionTime":"2025-11-26T11:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.158630 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.158668 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.158676 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.158688 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.158697 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:35Z","lastTransitionTime":"2025-11-26T11:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.260740 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.260775 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.260787 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.260801 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.260816 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:35Z","lastTransitionTime":"2025-11-26T11:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.362625 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.362656 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.362665 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.362676 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.362685 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:35Z","lastTransitionTime":"2025-11-26T11:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.464233 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.464277 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.464286 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.464297 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.464305 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:35Z","lastTransitionTime":"2025-11-26T11:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.566074 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.566118 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.566126 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.566137 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.566145 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:35Z","lastTransitionTime":"2025-11-26T11:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.667531 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.667561 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.667571 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.667583 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.667590 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:35Z","lastTransitionTime":"2025-11-26T11:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.705649 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph"
Nov 26 11:11:35 crc kubenswrapper[4622]: E1126 11:11:35.705762 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.769978 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.770017 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.770026 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.770039 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.770047 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:35Z","lastTransitionTime":"2025-11-26T11:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.871548 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.871575 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.871584 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.871596 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.871604 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:35Z","lastTransitionTime":"2025-11-26T11:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.973565 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.973605 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.973614 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.973623 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:35 crc kubenswrapper[4622]: I1126 11:11:35.973636 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:35Z","lastTransitionTime":"2025-11-26T11:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.075230 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.075252 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.075260 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.075269 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.075276 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:36Z","lastTransitionTime":"2025-11-26T11:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.177403 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.177448 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.177458 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.177470 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.177479 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:36Z","lastTransitionTime":"2025-11-26T11:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.279267 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.279300 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.279308 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.279320 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.279328 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:36Z","lastTransitionTime":"2025-11-26T11:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.381066 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.381088 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.381096 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.381106 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.381114 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:36Z","lastTransitionTime":"2025-11-26T11:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.482551 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.482577 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.482586 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.482597 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.482603 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:36Z","lastTransitionTime":"2025-11-26T11:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.584189 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.584214 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.584223 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.584232 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.584239 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:36Z","lastTransitionTime":"2025-11-26T11:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.685937 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.685965 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.685975 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.685985 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.685992 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:36Z","lastTransitionTime":"2025-11-26T11:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.705360 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.705375 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 26 11:11:36 crc kubenswrapper[4622]: E1126 11:11:36.705447 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.705365 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 26 11:11:36 crc kubenswrapper[4622]: E1126 11:11:36.705561 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 26 11:11:36 crc kubenswrapper[4622]: E1126 11:11:36.705593 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.787479 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.787525 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.787533 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.787542 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.787550 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:36Z","lastTransitionTime":"2025-11-26T11:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.889556 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.889582 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.889589 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.889598 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.889605 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:36Z","lastTransitionTime":"2025-11-26T11:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.991487 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.991538 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.991546 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.991555 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:36 crc kubenswrapper[4622]: I1126 11:11:36.991564 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:36Z","lastTransitionTime":"2025-11-26T11:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.093331 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.093352 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.093360 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.093375 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.093393 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:37Z","lastTransitionTime":"2025-11-26T11:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.194908 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.194932 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.194940 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.194949 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.194957 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:37Z","lastTransitionTime":"2025-11-26T11:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.297025 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.297048 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.297061 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.297071 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.297078 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:37Z","lastTransitionTime":"2025-11-26T11:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.398401 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.398844 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.398911 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.399000 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.399064 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:37Z","lastTransitionTime":"2025-11-26T11:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.501086 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.501112 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.501120 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.501130 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.501136 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:37Z","lastTransitionTime":"2025-11-26T11:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.602882 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.602919 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.602928 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.602965 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.602975 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:37Z","lastTransitionTime":"2025-11-26T11:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.704957 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph"
Nov 26 11:11:37 crc kubenswrapper[4622]: E1126 11:11:37.705050 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.705285 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.705413 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.705490 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.705573 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.705652 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:37Z","lastTransitionTime":"2025-11-26T11:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.807563 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.807611 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.807619 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.807629 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.807635 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:37Z","lastTransitionTime":"2025-11-26T11:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.909079 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.909121 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.909128 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.909139 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:37 crc kubenswrapper[4622]: I1126 11:11:37.909146 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:37Z","lastTransitionTime":"2025-11-26T11:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.011100 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.011123 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.011131 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.011140 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.011148 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:38Z","lastTransitionTime":"2025-11-26T11:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.113221 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.113241 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.113249 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.113258 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.113266 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:38Z","lastTransitionTime":"2025-11-26T11:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.214491 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.214672 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.214728 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.214785 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.214851 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:38Z","lastTransitionTime":"2025-11-26T11:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.316031 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.316079 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.316088 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.316097 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.316105 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:38Z","lastTransitionTime":"2025-11-26T11:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.417843 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.418158 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.418228 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.418290 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.418346 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:38Z","lastTransitionTime":"2025-11-26T11:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.520364 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.520402 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.520411 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.520420 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.520427 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:38Z","lastTransitionTime":"2025-11-26T11:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.622075 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.622095 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.622102 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.622128 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.622136 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:38Z","lastTransitionTime":"2025-11-26T11:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.704968 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:38 crc kubenswrapper[4622]: E1126 11:11:38.705045 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.705157 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:38 crc kubenswrapper[4622]: E1126 11:11:38.705201 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.705363 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:38 crc kubenswrapper[4622]: E1126 11:11:38.705431 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.723404 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.723426 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.723435 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.723445 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.723453 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:38Z","lastTransitionTime":"2025-11-26T11:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.825152 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.825179 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.825187 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.825214 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.825224 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:38Z","lastTransitionTime":"2025-11-26T11:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.927219 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.927282 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.927292 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.927303 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:38 crc kubenswrapper[4622]: I1126 11:11:38.927310 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:38Z","lastTransitionTime":"2025-11-26T11:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.029319 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.029368 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.029379 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.029402 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.029409 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:39Z","lastTransitionTime":"2025-11-26T11:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.131487 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.131557 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.131567 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.131582 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.131591 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:39Z","lastTransitionTime":"2025-11-26T11:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.233134 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.233163 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.233176 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.233188 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.233195 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:39Z","lastTransitionTime":"2025-11-26T11:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.334759 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.334789 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.334798 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.334810 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.334820 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:39Z","lastTransitionTime":"2025-11-26T11:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.436230 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.436262 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.436272 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.436285 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.436294 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:39Z","lastTransitionTime":"2025-11-26T11:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.538288 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.538321 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.538329 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.538342 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.538350 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:39Z","lastTransitionTime":"2025-11-26T11:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.639523 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.639560 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.639568 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.639580 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.639590 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:39Z","lastTransitionTime":"2025-11-26T11:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.705296 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:39 crc kubenswrapper[4622]: E1126 11:11:39.705404 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.741200 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.741226 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.741236 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.741246 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.741254 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:39Z","lastTransitionTime":"2025-11-26T11:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.842491 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.842550 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.842561 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.842575 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.842585 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:39Z","lastTransitionTime":"2025-11-26T11:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.944353 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.944400 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.944411 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.944421 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:39 crc kubenswrapper[4622]: I1126 11:11:39.944430 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:39Z","lastTransitionTime":"2025-11-26T11:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.046166 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.046203 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.046211 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.046221 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.046230 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:40Z","lastTransitionTime":"2025-11-26T11:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.148219 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.148250 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.148258 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.148269 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.148277 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:40Z","lastTransitionTime":"2025-11-26T11:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.250022 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.250049 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.250058 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.250070 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.250079 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:40Z","lastTransitionTime":"2025-11-26T11:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.351974 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.352006 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.352015 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.352028 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.352036 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:40Z","lastTransitionTime":"2025-11-26T11:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.453738 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.454182 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.454271 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.454329 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.454402 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:40Z","lastTransitionTime":"2025-11-26T11:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.556581 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.556610 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.556617 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.556630 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.556638 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:40Z","lastTransitionTime":"2025-11-26T11:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.657934 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.658149 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.658237 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.658295 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.658344 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:40Z","lastTransitionTime":"2025-11-26T11:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.705136 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.705157 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.705202 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:40 crc kubenswrapper[4622]: E1126 11:11:40.705237 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:40 crc kubenswrapper[4622]: E1126 11:11:40.705351 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:40 crc kubenswrapper[4622]: E1126 11:11:40.705408 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.705862 4622 scope.go:117] "RemoveContainer" containerID="b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a" Nov 26 11:11:40 crc kubenswrapper[4622]: E1126 11:11:40.706037 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.714217 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a50c801-f165-4548-b4b3-f4218c4bf866\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcfd0d3c7123cf20a72a99f6eb171bfde6b6930b0725010d05dd6c03c9838b7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d72500572c691e375fc8eae060f7f2296fcae5f47b4ada9e1a6a6cb1e2eb417a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aa986cd95cb36fa13517fe871cfb3d701525d13c901e2cd0349b2e67b3a28d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.722867 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.729643 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.735627 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.742534 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.748914 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.755956 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.759944 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.759972 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.759981 4622 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.759993 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.760001 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:40Z","lastTransitionTime":"2025-11-26T11:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.763141 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-conf
ig\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.771300 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.782162 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.791446 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.804795 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:25Z\\\",\\\"message\\\":\\\"rIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1126 11:11:25.340927 6303 ovnkube.go:599] Stopped ovnkube\\\\nI1126 11:11:25.340871 6303 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1126 11:11:25.341022 6303 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1126 11:11:25.341055 6303 services_controller.go:443] Built service openshift-config-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.161\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1126 11:11:25.341090 6303 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1126 11:11:25.341110 6303 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.825936 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f0
5548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.841731 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.851173 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.860777 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.861999 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.862030 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.862039 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.862059 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.862068 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:40Z","lastTransitionTime":"2025-11-26T11:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.870812 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.877552 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:40Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.963747 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.963796 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.963806 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.963818 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:40 crc kubenswrapper[4622]: I1126 11:11:40.963845 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:40Z","lastTransitionTime":"2025-11-26T11:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.065406 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.065439 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.065447 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.065460 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.065468 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:41Z","lastTransitionTime":"2025-11-26T11:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.167459 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.167494 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.167518 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.167531 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.167541 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:41Z","lastTransitionTime":"2025-11-26T11:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.269358 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.269415 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.269425 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.269440 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.269451 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:41Z","lastTransitionTime":"2025-11-26T11:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.371226 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.371261 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.371270 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.371283 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.371292 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:41Z","lastTransitionTime":"2025-11-26T11:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.472956 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.472998 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.473007 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.473019 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.473029 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:41Z","lastTransitionTime":"2025-11-26T11:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.574814 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.574870 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.574878 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.574890 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.574897 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:41Z","lastTransitionTime":"2025-11-26T11:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.676787 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.676828 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.676835 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.676846 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.676853 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:41Z","lastTransitionTime":"2025-11-26T11:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.705147 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:41 crc kubenswrapper[4622]: E1126 11:11:41.705225 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.778842 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.778875 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.778884 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.778896 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.778906 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:41Z","lastTransitionTime":"2025-11-26T11:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.881012 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.881046 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.881055 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.881070 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.881079 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:41Z","lastTransitionTime":"2025-11-26T11:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.983445 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.983477 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.983485 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.983497 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:41 crc kubenswrapper[4622]: I1126 11:11:41.983532 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:41Z","lastTransitionTime":"2025-11-26T11:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.085422 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.085456 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.085466 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.085478 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.085486 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:42Z","lastTransitionTime":"2025-11-26T11:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.187645 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.187677 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.187685 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.187696 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.187721 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:42Z","lastTransitionTime":"2025-11-26T11:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.290587 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.290624 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.290634 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.290648 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.290661 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:42Z","lastTransitionTime":"2025-11-26T11:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.392756 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.392968 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.393042 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.393111 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.393182 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:42Z","lastTransitionTime":"2025-11-26T11:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.495725 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.495773 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.495782 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.495796 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.495803 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:42Z","lastTransitionTime":"2025-11-26T11:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.597605 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.597636 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.597644 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.597653 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.597659 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:42Z","lastTransitionTime":"2025-11-26T11:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.699401 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.699486 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.699496 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.699525 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.699532 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:42Z","lastTransitionTime":"2025-11-26T11:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.700114 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.700206 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.700276 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.700334 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.700411 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:42Z","lastTransitionTime":"2025-11-26T11:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.705319 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:42 crc kubenswrapper[4622]: E1126 11:11:42.705399 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.705526 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:42 crc kubenswrapper[4622]: E1126 11:11:42.705569 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.705727 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:42 crc kubenswrapper[4622]: E1126 11:11:42.705772 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:42 crc kubenswrapper[4622]: E1126 11:11:42.709534 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:42Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.711423 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.711443 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.711450 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.711458 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.711465 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:42Z","lastTransitionTime":"2025-11-26T11:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:42 crc kubenswrapper[4622]: E1126 11:11:42.719375 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:42Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.721332 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.721354 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.721361 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.721369 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.721376 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:42Z","lastTransitionTime":"2025-11-26T11:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:42 crc kubenswrapper[4622]: E1126 11:11:42.728670 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:42Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.730785 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.730804 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.730812 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.730820 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.730826 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:42Z","lastTransitionTime":"2025-11-26T11:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:42 crc kubenswrapper[4622]: E1126 11:11:42.738398 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:42Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.740224 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.740248 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.740255 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.740264 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.740271 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:42Z","lastTransitionTime":"2025-11-26T11:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:42 crc kubenswrapper[4622]: E1126 11:11:42.748188 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:42Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:42 crc kubenswrapper[4622]: E1126 11:11:42.748308 4622 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.801714 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.801733 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.801741 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.801749 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.801756 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:42Z","lastTransitionTime":"2025-11-26T11:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.903460 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.903484 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.903495 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.903525 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:42 crc kubenswrapper[4622]: I1126 11:11:42.903534 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:42Z","lastTransitionTime":"2025-11-26T11:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.005246 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.005266 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.005274 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.005284 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.005292 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:43Z","lastTransitionTime":"2025-11-26T11:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.106913 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.106960 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.106970 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.106985 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.106994 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:43Z","lastTransitionTime":"2025-11-26T11:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.209483 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.209612 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.209768 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.209908 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.210039 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:43Z","lastTransitionTime":"2025-11-26T11:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.312556 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.312695 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.312758 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.312820 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.312877 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:43Z","lastTransitionTime":"2025-11-26T11:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.414951 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.415055 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.415124 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.415186 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.415244 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:43Z","lastTransitionTime":"2025-11-26T11:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.516748 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.516781 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.516791 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.516805 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.516815 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:43Z","lastTransitionTime":"2025-11-26T11:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.618348 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.618483 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.618663 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.618809 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.618936 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:43Z","lastTransitionTime":"2025-11-26T11:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.705054 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:43 crc kubenswrapper[4622]: E1126 11:11:43.705143 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.720952 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.720982 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.720992 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.721006 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.721017 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:43Z","lastTransitionTime":"2025-11-26T11:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.822542 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.822572 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.822580 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.822593 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.822604 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:43Z","lastTransitionTime":"2025-11-26T11:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.924439 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.924483 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.924493 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.924529 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:43 crc kubenswrapper[4622]: I1126 11:11:43.924543 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:43Z","lastTransitionTime":"2025-11-26T11:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.026248 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.026276 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.026285 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.026297 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.026305 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:44Z","lastTransitionTime":"2025-11-26T11:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.128449 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.128635 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.128701 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.128763 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.128819 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:44Z","lastTransitionTime":"2025-11-26T11:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.141914 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs\") pod \"network-metrics-daemon-z78ph\" (UID: \"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\") " pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:44 crc kubenswrapper[4622]: E1126 11:11:44.142102 4622 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 26 11:11:44 crc kubenswrapper[4622]: E1126 11:11:44.142233 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs podName:f23a93f9-15cf-4dfd-802d-4b6bd04bbf81 nodeName:}" failed. No retries permitted until 2025-11-26 11:12:16.142215158 +0000 UTC m=+95.733426690 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs") pod "network-metrics-daemon-z78ph" (UID: "f23a93f9-15cf-4dfd-802d-4b6bd04bbf81") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.230565 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.230603 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.230612 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.230625 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.230635 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:44Z","lastTransitionTime":"2025-11-26T11:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.332529 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.332553 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.332562 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.332573 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.332582 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:44Z","lastTransitionTime":"2025-11-26T11:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.433897 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.433921 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.433928 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.433937 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.433944 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:44Z","lastTransitionTime":"2025-11-26T11:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.535359 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.535387 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.535406 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.535418 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.535426 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:44Z","lastTransitionTime":"2025-11-26T11:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.637707 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.637743 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.637751 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.637766 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.637779 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:44Z","lastTransitionTime":"2025-11-26T11:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.705374 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.705429 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:44 crc kubenswrapper[4622]: E1126 11:11:44.705477 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.705545 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:44 crc kubenswrapper[4622]: E1126 11:11:44.705644 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:44 crc kubenswrapper[4622]: E1126 11:11:44.705775 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.739059 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.739077 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.739084 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.739092 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.739100 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:44Z","lastTransitionTime":"2025-11-26T11:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.841016 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.841043 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.841051 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.841061 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.841068 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:44Z","lastTransitionTime":"2025-11-26T11:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.943103 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.943137 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.943147 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.943159 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:44 crc kubenswrapper[4622]: I1126 11:11:44.943167 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:44Z","lastTransitionTime":"2025-11-26T11:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.044481 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.044526 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.044535 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.044546 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.044555 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:45Z","lastTransitionTime":"2025-11-26T11:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.146667 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.146693 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.146700 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.146711 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.146718 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:45Z","lastTransitionTime":"2025-11-26T11:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.248774 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.248800 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.248808 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.248819 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.248827 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:45Z","lastTransitionTime":"2025-11-26T11:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.350572 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.350591 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.350598 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.350610 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.350617 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:45Z","lastTransitionTime":"2025-11-26T11:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.452063 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.452103 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.452112 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.452128 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.452137 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:45Z","lastTransitionTime":"2025-11-26T11:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.553829 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.553856 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.553864 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.553874 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.553881 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:45Z","lastTransitionTime":"2025-11-26T11:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.655626 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.655670 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.655679 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.655697 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.655706 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:45Z","lastTransitionTime":"2025-11-26T11:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.704963 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:45 crc kubenswrapper[4622]: E1126 11:11:45.705084 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.757775 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.757811 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.757820 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.757832 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.757840 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:45Z","lastTransitionTime":"2025-11-26T11:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.859848 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.859892 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.859902 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.859915 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.859923 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:45Z","lastTransitionTime":"2025-11-26T11:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.962157 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.962187 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.962195 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.962207 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:45 crc kubenswrapper[4622]: I1126 11:11:45.962215 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:45Z","lastTransitionTime":"2025-11-26T11:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.063665 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.063712 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.063722 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.063734 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.063742 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:46Z","lastTransitionTime":"2025-11-26T11:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.165945 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.165996 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.166006 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.166022 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.166030 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:46Z","lastTransitionTime":"2025-11-26T11:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.267932 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.267965 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.267975 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.267989 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.267997 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:46Z","lastTransitionTime":"2025-11-26T11:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.369754 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.369792 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.369803 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.369823 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.369835 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:46Z","lastTransitionTime":"2025-11-26T11:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.471460 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.471521 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.471531 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.471545 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.471554 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:46Z","lastTransitionTime":"2025-11-26T11:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.573058 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.573084 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.573092 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.573104 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.573114 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:46Z","lastTransitionTime":"2025-11-26T11:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.675319 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.675357 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.675366 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.675380 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.675389 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:46Z","lastTransitionTime":"2025-11-26T11:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.705616 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.706715 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:46 crc kubenswrapper[4622]: E1126 11:11:46.706826 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.708779 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:46 crc kubenswrapper[4622]: E1126 11:11:46.709117 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:46 crc kubenswrapper[4622]: E1126 11:11:46.709218 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.777204 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.777232 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.777242 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.777254 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.777263 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:46Z","lastTransitionTime":"2025-11-26T11:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.879088 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.879120 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.879129 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.879142 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.879151 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:46Z","lastTransitionTime":"2025-11-26T11:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.977689 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vmw42_fc4efcee-b872-406d-a694-3572222a8dfc/kube-multus/0.log" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.977735 4622 generic.go:334] "Generic (PLEG): container finished" podID="fc4efcee-b872-406d-a694-3572222a8dfc" containerID="4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c" exitCode=1 Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.977763 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vmw42" event={"ID":"fc4efcee-b872-406d-a694-3572222a8dfc","Type":"ContainerDied","Data":"4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c"} Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.978136 4622 scope.go:117] "RemoveContainer" containerID="4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.980230 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.980262 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.980273 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.980287 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.980298 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:46Z","lastTransitionTime":"2025-11-26T11:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.988731 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:46Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:46 crc kubenswrapper[4622]: I1126 11:11:46.998303 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:46Z is after 2025-08-24T17:21:41Z" Nov 26 
11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.005738 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.013590 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.024325 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.038335 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:25Z\\\",\\\"message\\\":\\\"rIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1126 11:11:25.340927 6303 ovnkube.go:599] Stopped ovnkube\\\\nI1126 11:11:25.340871 6303 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1126 11:11:25.341022 6303 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1126 11:11:25.341055 6303 services_controller.go:443] Built service openshift-config-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.161\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1126 11:11:25.341090 6303 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1126 11:11:25.341110 6303 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.051582 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f0
5548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.060257 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.069105 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.077489 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.082811 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.082840 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.082852 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.082866 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.082875 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:47Z","lastTransitionTime":"2025-11-26T11:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.086973 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:46Z\\\",\\\"message\\\":\\\"2025-11-26T11:11:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e7cb9542-ae4e-40fe-938a-af2c06b83df7\\\\n2025-11-26T11:11:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e7cb9542-ae4e-40fe-938a-af2c06b83df7 to /host/opt/cni/bin/\\\\n2025-11-26T11:11:01Z [verbose] multus-daemon started\\\\n2025-11-26T11:11:01Z [verbose] Readiness Indicator file check\\\\n2025-11-26T11:11:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.094535 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.102616 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a50c801-f165-4548-b4b3-f4218c4bf866\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcfd0d3c7123cf20a72a99f6eb171bfde6b6930b0725010d05dd6c03c9838b7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d72500572c691e375fc8eae060f7f2296fcae5f47b4ada9e1a6a6cb1e2eb417a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aa986cd95cb36fa13517fe871cfb3d701525d13c901e2cd0349b2e67b3a28d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.111630 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.120615 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.127681 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.135144 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.141623 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.185247 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.185277 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.185287 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.185303 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.185313 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:47Z","lastTransitionTime":"2025-11-26T11:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.286785 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.286884 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.286893 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.286904 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.286912 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:47Z","lastTransitionTime":"2025-11-26T11:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.388956 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.388981 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.388990 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.389001 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.389008 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:47Z","lastTransitionTime":"2025-11-26T11:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.491633 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.491666 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.491691 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.491705 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.491712 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:47Z","lastTransitionTime":"2025-11-26T11:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.593833 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.593865 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.593874 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.593887 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.593895 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:47Z","lastTransitionTime":"2025-11-26T11:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.696267 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.696314 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.696322 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.696334 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.696344 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:47Z","lastTransitionTime":"2025-11-26T11:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.705530 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:47 crc kubenswrapper[4622]: E1126 11:11:47.705630 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.798646 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.798688 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.798696 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.798712 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.798722 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:47Z","lastTransitionTime":"2025-11-26T11:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.900730 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.900759 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.900768 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.900781 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.900789 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:47Z","lastTransitionTime":"2025-11-26T11:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.981443 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vmw42_fc4efcee-b872-406d-a694-3572222a8dfc/kube-multus/0.log" Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.981492 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vmw42" event={"ID":"fc4efcee-b872-406d-a694-3572222a8dfc","Type":"ContainerStarted","Data":"56dc4e957dfc7b5545751f3cb685d899d2cb1aec53d04cdca0c651a03815f3a0"} Nov 26 11:11:47 crc kubenswrapper[4622]: I1126 11:11:47.991657 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a50c801-f165-4548-b4b3-f4218c4bf866\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcfd0d3c7123cf20a72a99f6eb171bfde6b6930b0725010d05dd6c03c9838b7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d72500572c691e375fc8eae060f7f2296fcae5f47b4ada9e1a6a6cb1e2eb417a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aa986cd95cb36fa13517fe871cfb3d701525d13c901e2cd0349b2e67b3a28d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:47Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.002835 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.002865 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.002875 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.002888 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.002897 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:48Z","lastTransitionTime":"2025-11-26T11:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.004686 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.012689 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.019718 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.028017 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.035029 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.043203 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.051069 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 
11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.057387 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.065295 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.075818 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.087966 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:25Z\\\",\\\"message\\\":\\\"rIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1126 11:11:25.340927 6303 ovnkube.go:599] Stopped ovnkube\\\\nI1126 11:11:25.340871 6303 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1126 11:11:25.341022 6303 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1126 11:11:25.341055 6303 services_controller.go:443] Built service openshift-config-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.161\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1126 11:11:25.341090 6303 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1126 11:11:25.341110 6303 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.100626 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f0
5548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.104325 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.104358 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:48 crc 
kubenswrapper[4622]: I1126 11:11:48.104367 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.104380 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.104388 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:48Z","lastTransitionTime":"2025-11-26T11:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.109222 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.117802 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.126437 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.138032 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56dc4e957dfc7b5545751f3cb685d899d2cb1aec53d04cdca0c651a03815f3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:46Z\\\",\\\"message\\\":\\\"2025-11-26T11:11:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e7cb9542-ae4e-40fe-938a-af2c06b83df7\\\\n2025-11-26T11:11:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e7cb9542-ae4e-40fe-938a-af2c06b83df7 to /host/opt/cni/bin/\\\\n2025-11-26T11:11:01Z [verbose] multus-daemon started\\\\n2025-11-26T11:11:01Z [verbose] Readiness Indicator file check\\\\n2025-11-26T11:11:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.146073 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:48Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.206098 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.206156 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.206167 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.206181 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.206190 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:48Z","lastTransitionTime":"2025-11-26T11:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.308212 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.308250 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.308259 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.308275 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.308285 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:48Z","lastTransitionTime":"2025-11-26T11:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.410851 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.410917 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.410928 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.410943 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.410955 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:48Z","lastTransitionTime":"2025-11-26T11:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.512922 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.512956 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.512965 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.512977 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.512987 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:48Z","lastTransitionTime":"2025-11-26T11:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.615795 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.615828 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.615840 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.615854 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.615862 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:48Z","lastTransitionTime":"2025-11-26T11:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.707121 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:48 crc kubenswrapper[4622]: E1126 11:11:48.707236 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.707123 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.707329 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:48 crc kubenswrapper[4622]: E1126 11:11:48.707436 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:48 crc kubenswrapper[4622]: E1126 11:11:48.707529 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.718027 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.718052 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.718062 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.718072 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.718083 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:48Z","lastTransitionTime":"2025-11-26T11:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.819873 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.819894 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.819902 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.819915 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.819926 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:48Z","lastTransitionTime":"2025-11-26T11:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.923158 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.923234 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.923247 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.923270 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:48 crc kubenswrapper[4622]: I1126 11:11:48.923304 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:48Z","lastTransitionTime":"2025-11-26T11:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.025034 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.025154 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.025219 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.025292 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.025357 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:49Z","lastTransitionTime":"2025-11-26T11:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.127552 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.127583 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.127592 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.127606 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.127616 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:49Z","lastTransitionTime":"2025-11-26T11:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.230019 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.230328 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.230410 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.230523 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.230583 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:49Z","lastTransitionTime":"2025-11-26T11:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.333791 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.334144 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.334190 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.334213 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.334232 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:49Z","lastTransitionTime":"2025-11-26T11:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.437045 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.437081 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.437090 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.437101 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.437111 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:49Z","lastTransitionTime":"2025-11-26T11:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.539091 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.539123 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.539131 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.539142 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.539150 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:49Z","lastTransitionTime":"2025-11-26T11:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.640894 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.640924 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.640932 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.640943 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.640951 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:49Z","lastTransitionTime":"2025-11-26T11:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.705976 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:49 crc kubenswrapper[4622]: E1126 11:11:49.706091 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.742666 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.742698 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.742706 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.742718 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.742726 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:49Z","lastTransitionTime":"2025-11-26T11:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.844801 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.844838 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.844846 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.844858 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.844866 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:49Z","lastTransitionTime":"2025-11-26T11:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.946882 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.946909 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.946916 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.946929 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:49 crc kubenswrapper[4622]: I1126 11:11:49.946938 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:49Z","lastTransitionTime":"2025-11-26T11:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.049011 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.049042 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.049049 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.049061 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.049068 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:50Z","lastTransitionTime":"2025-11-26T11:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.150640 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.150670 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.150679 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.150690 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.150699 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:50Z","lastTransitionTime":"2025-11-26T11:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.252740 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.252793 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.252802 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.252815 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.252827 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:50Z","lastTransitionTime":"2025-11-26T11:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.354649 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.354697 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.354706 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.354717 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.354724 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:50Z","lastTransitionTime":"2025-11-26T11:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.456389 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.456440 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.456448 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.456462 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.456470 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:50Z","lastTransitionTime":"2025-11-26T11:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.557988 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.558020 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.558028 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.558039 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.558049 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:50Z","lastTransitionTime":"2025-11-26T11:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.660303 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.660337 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.660363 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.660376 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.660385 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:50Z","lastTransitionTime":"2025-11-26T11:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.705588 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.705636 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:50 crc kubenswrapper[4622]: E1126 11:11:50.705683 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.705595 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:50 crc kubenswrapper[4622]: E1126 11:11:50.705829 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:50 crc kubenswrapper[4622]: E1126 11:11:50.706150 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.715366 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"20
25-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.727174 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.735772 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.744400 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56dc4e957dfc7b5545751f3cb685d899d2cb1aec53d04cdca0c651a03815f3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:46Z\\\",\\\"message\\\":\\\"2025-11-26T11:11:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e7cb9542-ae4e-40fe-938a-af2c06b83df7\\\\n2025-11-26T11:11:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e7cb9542-ae4e-40fe-938a-af2c06b83df7 to /host/opt/cni/bin/\\\\n2025-11-26T11:11:01Z [verbose] multus-daemon started\\\\n2025-11-26T11:11:01Z [verbose] Readiness Indicator file check\\\\n2025-11-26T11:11:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.751022 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.761815 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.761856 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.761865 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.761878 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.761886 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:50Z","lastTransitionTime":"2025-11-26T11:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.763914 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.774518 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.782288 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.788985 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.796320 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.804060 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.811581 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a50c801-f165-4548-b4b3-f4218c4bf866\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcfd0d3c7123cf20a72a99f6eb171bfde6b6930b0725010d05dd6c03c9838b7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d72500572c691e375fc8eae060f7f2296fcae5f47b4ada9e1a6a6cb1e2eb
417a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aa986cd95cb36fa13517fe871cfb3d701525d13c901e2cd0349b2e67b3a28d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.819057 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 
11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.829301 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.837721 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.846082 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.855131 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.863476 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.863520 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:50 crc 
kubenswrapper[4622]: I1126 11:11:50.863529 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.863544 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.863553 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:50Z","lastTransitionTime":"2025-11-26T11:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.867814 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf2671934
3c126aaba2a1321f336f412a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:25Z\\\",\\\"message\\\":\\\"rIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1126 11:11:25.340927 6303 ovnkube.go:599] Stopped ovnkube\\\\nI1126 11:11:25.340871 6303 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1126 11:11:25.341022 6303 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1126 11:11:25.341055 6303 services_controller.go:443] Built service openshift-config-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.161\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1126 11:11:25.341090 6303 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1126 11:11:25.341110 6303 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:50Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.964866 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.964916 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.964925 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.964937 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:50 crc kubenswrapper[4622]: I1126 11:11:50.964945 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:50Z","lastTransitionTime":"2025-11-26T11:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:51 crc kubenswrapper[4622]: I1126 11:11:51.677868 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:51 crc kubenswrapper[4622]: I1126 11:11:51.677910 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:51 crc kubenswrapper[4622]: I1126 11:11:51.677919 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:51 crc kubenswrapper[4622]: I1126 11:11:51.677931 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:51 crc kubenswrapper[4622]: I1126 11:11:51.677939 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:51Z","lastTransitionTime":"2025-11-26T11:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:51 crc kubenswrapper[4622]: I1126 11:11:51.705295 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:51 crc kubenswrapper[4622]: E1126 11:11:51.705417 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:51 crc kubenswrapper[4622]: I1126 11:11:51.781080 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:51 crc kubenswrapper[4622]: I1126 11:11:51.781134 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:51 crc kubenswrapper[4622]: I1126 11:11:51.781146 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:51 crc kubenswrapper[4622]: I1126 11:11:51.781166 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:51 crc kubenswrapper[4622]: I1126 11:11:51.781179 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:51Z","lastTransitionTime":"2025-11-26T11:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.500694 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.500727 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.500737 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.500760 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.500772 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:52Z","lastTransitionTime":"2025-11-26T11:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.603120 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.603159 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.603168 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.603182 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.603192 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:52Z","lastTransitionTime":"2025-11-26T11:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.705520 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.705561 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.705574 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:52 crc kubenswrapper[4622]: E1126 11:11:52.705640 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:52 crc kubenswrapper[4622]: E1126 11:11:52.705775 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.706022 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.706048 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.706058 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.706073 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.706082 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:52Z","lastTransitionTime":"2025-11-26T11:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:52 crc kubenswrapper[4622]: E1126 11:11:52.706205 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.706564 4622 scope.go:117] "RemoveContainer" containerID="b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.808701 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.808733 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.808760 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.808775 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.808785 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:52Z","lastTransitionTime":"2025-11-26T11:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.910907 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.910941 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.910949 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.910963 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.910972 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:52Z","lastTransitionTime":"2025-11-26T11:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.994182 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovnkube-controller/2.log" Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.995954 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerStarted","Data":"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818"} Nov 26 11:11:52 crc kubenswrapper[4622]: I1126 11:11:52.996365 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.005886 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a50c801-f165-4548-b4b3-f4218c4bf866\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcfd0d3c7123cf20a72a99f6eb171bfde6b6930b0725010d05dd6c03c9838b7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d72500572c691e375fc8eae060f7f2296fcae5f47b4ada9e1a6a6cb1e2eb417a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aa986cd95cb36fa13517fe871cfb3d70152
5d13c901e2cd0349b2e67b3a28d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.014040 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.014066 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.014074 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.014086 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.014094 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:53Z","lastTransitionTime":"2025-11-26T11:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.017416 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.033942 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.044030 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.052060 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.059070 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.067133 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.074892 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 
11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.081856 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.089867 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.099965 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.110870 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.110915 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:53 crc 
kubenswrapper[4622]: I1126 11:11:53.110926 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.110939 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.110947 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:53Z","lastTransitionTime":"2025-11-26T11:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.114003 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a1ac35e79b440c5017b730b338cf4de365eba6
73ab7b525e0b6694acb3c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:25Z\\\",\\\"message\\\":\\\"rIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1126 11:11:25.340927 6303 ovnkube.go:599] Stopped ovnkube\\\\nI1126 11:11:25.340871 6303 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1126 11:11:25.341022 6303 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1126 11:11:25.341055 6303 services_controller.go:443] Built service openshift-config-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.161\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1126 11:11:25.341090 6303 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1126 11:11:25.341110 6303 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: E1126 11:11:53.121983 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.125948 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.125980 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.125989 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.126002 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.126014 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:53Z","lastTransitionTime":"2025-11-26T11:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.133948 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: E1126 11:11:53.137843 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.147151 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.147209 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.147222 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.147246 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.147259 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:53Z","lastTransitionTime":"2025-11-26T11:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.153092 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: E1126 11:11:53.157230 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.161116 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.161177 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.161190 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.161208 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.161219 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:53Z","lastTransitionTime":"2025-11-26T11:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.166601 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: E1126 11:11:53.171751 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.175345 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.175626 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.175637 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.175668 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.175680 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:53Z","lastTransitionTime":"2025-11-26T11:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.176917 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: E1126 11:11:53.184594 4622 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.186137 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.186161 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.186170 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.186181 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.186190 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:53Z","lastTransitionTime":"2025-11-26T11:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.187144 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56dc4e957dfc7b5545751f3cb685d899d2cb1aec53d04cdca0c651a03815f3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:46Z\\\",\\\"message\\\":\\\"2025-11-26T11:11:01+00:00 [cnibincopy]
Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e7cb9542-ae4e-40fe-938a-af2c06b83df7\\\\n2025-11-26T11:11:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e7cb9542-ae4e-40fe-938a-af2c06b83df7 to /host/opt/cni/bin/\\\\n2025-11-26T11:11:01Z [verbose] multus-daemon started\\\\n2025-11-26T11:11:01Z [verbose] Readiness Indicator file check\\\\n2025-11-26T11:11:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.194380 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.288726 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.288758 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.288767 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.288783 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.288791 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:53Z","lastTransitionTime":"2025-11-26T11:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.390555 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.390592 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.390602 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.390616 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.390626 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:53Z","lastTransitionTime":"2025-11-26T11:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.493451 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.493517 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.493531 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.493551 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.493563 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:53Z","lastTransitionTime":"2025-11-26T11:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.595774 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.595828 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.595843 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.595862 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.595872 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:53Z","lastTransitionTime":"2025-11-26T11:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.698557 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.698607 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.698619 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.698638 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.698650 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:53Z","lastTransitionTime":"2025-11-26T11:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.705796 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:53 crc kubenswrapper[4622]: E1126 11:11:53.705909 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.801076 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.801114 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.801124 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.801139 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.801150 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:53Z","lastTransitionTime":"2025-11-26T11:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.903694 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.903729 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.903739 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.903753 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:53 crc kubenswrapper[4622]: I1126 11:11:53.903761 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:53Z","lastTransitionTime":"2025-11-26T11:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.001059 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovnkube-controller/3.log" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.001705 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovnkube-controller/2.log" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.004194 4622 generic.go:334] "Generic (PLEG): container finished" podID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerID="48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818" exitCode=1 Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.004236 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerDied","Data":"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818"} Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.004286 4622 scope.go:117] "RemoveContainer" containerID="b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.004873 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.004901 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.004910 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.004924 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.004934 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:54Z","lastTransitionTime":"2025-11-26T11:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.004920 4622 scope.go:117] "RemoveContainer" containerID="48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818" Nov 26 11:11:54 crc kubenswrapper[4622]: E1126 11:11:54.005149 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.016095 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.024815 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.033110 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a50c801-f165-4548-b4b3-f4218c4bf866\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcfd0d3c7123cf20a72a99f6eb171bfde6b6930b0725010d05dd6c03c9838b7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d72500572c691e375fc8eae060f7f2296fcae5f47b4ada9e1a6a6cb1e2eb417a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},
\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aa986cd95cb36fa13517fe871cfb3d701525d13c901e2cd0349b2e67b3a28d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.041784 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.049081 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.055208 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.062727 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.070082 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 
11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.076560 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.084582 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.093851 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.105683 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5f87fcf6708aa75a433af1471e6775cf26719343c126aaba2a1321f336f412a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:25Z\\\",\\\"message\\\":\\\"rIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1126 11:11:25.340927 6303 ovnkube.go:599] Stopped ovnkube\\\\nI1126 11:11:25.340871 6303 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1126 11:11:25.341022 6303 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1126 11:11:25.341055 6303 services_controller.go:443] Built service openshift-config-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.161\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1126 11:11:25.341090 6303 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1126 11:11:25.341110 6303 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"lse, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.149\\\\\\\", Port:443, 
Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1126 11:11:53.464731 6688 obj_retry.go:285] Attempting retry of *v1.Pod openshift-multus/network-metrics-daemon-z78ph before timer (time: 2025-11-26 11:11:54.927004784 +0000 UTC m=+2.049233149): skip\\\\nF1126 11:11:53.464759 6688 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z]\\\\nI1126 11:11:53.464\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.106868 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.106891 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.106900 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.106929 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.106938 4622 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:54Z","lastTransitionTime":"2025-11-26T11:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.115310 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56dc4e957dfc7b5545751f3cb685d899d2cb1aec53d04cdca0c651a03815f3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:46Z\\\",\\\"message\\\":\\\"2025-11-26T11:11:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e7cb9542-ae4e-40fe-938a-af2c06b83df7\\\\n2025-11-26T11:11:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e7cb9542-ae4e-40fe-938a-af2c06b83df7 to /host/opt/cni/bin/\\\\n2025-11-26T11:11:01Z [verbose] multus-daemon started\\\\n2025-11-26T11:11:01Z [verbose] Readiness Indicator file check\\\\n2025-11-26T11:11:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.122016 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.138822 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4
fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.146702 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.154829 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.162308 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:54Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.209454 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.209490 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.209516 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.209534 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.209544 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:54Z","lastTransitionTime":"2025-11-26T11:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.312189 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.312252 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.312266 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.312288 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.312304 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:54Z","lastTransitionTime":"2025-11-26T11:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.414376 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.414450 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.414460 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.414522 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.414539 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:54Z","lastTransitionTime":"2025-11-26T11:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.516334 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.516373 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.516382 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.516408 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.516419 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:54Z","lastTransitionTime":"2025-11-26T11:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.619151 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.619191 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.619200 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.619214 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.619223 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:54Z","lastTransitionTime":"2025-11-26T11:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.705466 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.705485 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:54 crc kubenswrapper[4622]: E1126 11:11:54.705632 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.705675 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:54 crc kubenswrapper[4622]: E1126 11:11:54.705749 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:54 crc kubenswrapper[4622]: E1126 11:11:54.705914 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.722105 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.722166 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.722178 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.722202 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.722215 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:54Z","lastTransitionTime":"2025-11-26T11:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.825406 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.825437 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.825446 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.825460 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.825473 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:54Z","lastTransitionTime":"2025-11-26T11:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.928073 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.928243 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.928302 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.928366 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:54 crc kubenswrapper[4622]: I1126 11:11:54.928434 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:54Z","lastTransitionTime":"2025-11-26T11:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.009290 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovnkube-controller/3.log" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.012875 4622 scope.go:117] "RemoveContainer" containerID="48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818" Nov 26 11:11:55 crc kubenswrapper[4622]: E1126 11:11:55.013016 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.025585 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.030340 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.030372 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.030382 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.030409 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.030422 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:55Z","lastTransitionTime":"2025-11-26T11:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.035860 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.044820 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a50c801-f165-4548-b4b3-f4218c4bf866\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcfd0d3c7123cf20a72a99f6eb171bfde6b6930b0725010d05dd6c03c9838b7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d72500572c691e375fc8eae060f7f2296fcae5f47b4ada9e1a6a6cb1e2eb417a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aa986cd95cb36fa13517fe871cfb3d701525d13c901e2cd0349b2e67b3a28d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.055424 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.064256 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.072069 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.080410 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.089158 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 
11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.096403 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.106168 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.119875 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.133030 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.133054 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:55 crc 
kubenswrapper[4622]: I1126 11:11:55.133065 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.133079 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.133089 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:55Z","lastTransitionTime":"2025-11-26T11:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.133608 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a1ac35e79b440c5017b730b338cf4de365eba6
73ab7b525e0b6694acb3c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"lse, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.149\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1126 11:11:53.464731 6688 obj_retry.go:285] Attempting retry of *v1.Pod openshift-multus/network-metrics-daemon-z78ph before timer (time: 2025-11-26 11:11:54.927004784 +0000 UTC m=+2.049233149): skip\\\\nF1126 11:11:53.464759 6688 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z]\\\\nI1126 11:11:53.464\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.144406 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56dc4e957dfc7b5545751f3cb685d899d2cb1aec53d04cdca0c651a03815f3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:46Z\\\",\\\"message\\\":\\\"2025-11-26T11:11:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e7cb9542-ae4e-40fe-938a-af2c06b83df7\\\\n2025-11-26T11:11:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e7cb9542-ae4e-40fe-938a-af2c06b83df7 to 
/host/opt/cni/bin/\\\\n2025-11-26T11:11:01Z [verbose] multus-daemon started\\\\n2025-11-26T11:11:01Z [verbose] Readiness Indicator file check\\\\n2025-11-26T11:11:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.153332 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.168698 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4
fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.178271 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.188414 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.196882 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:55Z is after 2025-08-24T17:21:41Z" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.235608 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.235643 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.235652 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.235668 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.235679 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:55Z","lastTransitionTime":"2025-11-26T11:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.338095 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.338165 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.338177 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.338203 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.338218 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:55Z","lastTransitionTime":"2025-11-26T11:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.440629 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.440704 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.440716 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.440742 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.440758 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:55Z","lastTransitionTime":"2025-11-26T11:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.543293 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.543336 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.543354 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.543371 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.543380 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:55Z","lastTransitionTime":"2025-11-26T11:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.645746 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.645789 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.645798 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.645813 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.645823 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:55Z","lastTransitionTime":"2025-11-26T11:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.705546 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:55 crc kubenswrapper[4622]: E1126 11:11:55.705668 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.747825 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.747885 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.747899 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.747924 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.747943 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:55Z","lastTransitionTime":"2025-11-26T11:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.850553 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.850594 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.850604 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.850623 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.850631 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:55Z","lastTransitionTime":"2025-11-26T11:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.953797 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.953848 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.953859 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.953878 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:55 crc kubenswrapper[4622]: I1126 11:11:55.953891 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:55Z","lastTransitionTime":"2025-11-26T11:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.056314 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.056355 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.056364 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.056378 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.056408 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:56Z","lastTransitionTime":"2025-11-26T11:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.159195 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.159234 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.159243 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.159256 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.159264 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:56Z","lastTransitionTime":"2025-11-26T11:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.262048 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.262090 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.262100 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.262113 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.262122 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:56Z","lastTransitionTime":"2025-11-26T11:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.364145 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.364183 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.364194 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.364211 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.364220 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:56Z","lastTransitionTime":"2025-11-26T11:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.466667 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.466740 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.466752 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.466784 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.466800 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:56Z","lastTransitionTime":"2025-11-26T11:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.569166 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.569225 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.569236 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.569260 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.569274 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:56Z","lastTransitionTime":"2025-11-26T11:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.672215 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.672273 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.672284 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.672307 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.672319 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:56Z","lastTransitionTime":"2025-11-26T11:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.705410 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.705462 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.705410 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:56 crc kubenswrapper[4622]: E1126 11:11:56.705581 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:56 crc kubenswrapper[4622]: E1126 11:11:56.705730 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:56 crc kubenswrapper[4622]: E1126 11:11:56.705902 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.774746 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.774774 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.774782 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.774795 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.774807 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:56Z","lastTransitionTime":"2025-11-26T11:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.877182 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.877215 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.877226 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.877238 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.877246 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:56Z","lastTransitionTime":"2025-11-26T11:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.979462 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.979549 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.979565 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.979587 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:56 crc kubenswrapper[4622]: I1126 11:11:56.979600 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:56Z","lastTransitionTime":"2025-11-26T11:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.081441 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.081475 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.081484 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.081497 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.081518 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:57Z","lastTransitionTime":"2025-11-26T11:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.184239 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.184308 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.184323 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.184352 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.184365 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:57Z","lastTransitionTime":"2025-11-26T11:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.286330 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.286366 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.286376 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.286404 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.286412 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:57Z","lastTransitionTime":"2025-11-26T11:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.388782 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.388832 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.388846 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.388863 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.388873 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:57Z","lastTransitionTime":"2025-11-26T11:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.490805 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.490847 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.490858 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.490874 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.490883 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:57Z","lastTransitionTime":"2025-11-26T11:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.593496 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.593602 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.593612 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.593629 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.593639 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:57Z","lastTransitionTime":"2025-11-26T11:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.696041 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.696082 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.696092 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.696106 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.696120 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:57Z","lastTransitionTime":"2025-11-26T11:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.705327 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:57 crc kubenswrapper[4622]: E1126 11:11:57.705463 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.798050 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.798095 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.798104 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.798120 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.798130 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:57Z","lastTransitionTime":"2025-11-26T11:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.900714 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.900749 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.900757 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.900770 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:57 crc kubenswrapper[4622]: I1126 11:11:57.900781 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:57Z","lastTransitionTime":"2025-11-26T11:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.003288 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.003327 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.003337 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.003352 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.003359 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:58Z","lastTransitionTime":"2025-11-26T11:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.104484 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.104538 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.104548 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.104560 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.104568 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:58Z","lastTransitionTime":"2025-11-26T11:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.206336 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.206364 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.206372 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.206386 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.206407 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:58Z","lastTransitionTime":"2025-11-26T11:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.308207 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.308246 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.308255 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.308271 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.308280 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:58Z","lastTransitionTime":"2025-11-26T11:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.410479 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.410558 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.410570 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.410603 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.410617 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:58Z","lastTransitionTime":"2025-11-26T11:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.513230 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.513269 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.513278 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.513292 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.513305 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:58Z","lastTransitionTime":"2025-11-26T11:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.615573 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.615684 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.615699 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.615724 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.615740 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:58Z","lastTransitionTime":"2025-11-26T11:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.705869 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:11:58 crc kubenswrapper[4622]: E1126 11:11:58.706025 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.705891 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:11:58 crc kubenswrapper[4622]: E1126 11:11:58.706122 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.705891 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:11:58 crc kubenswrapper[4622]: E1126 11:11:58.706194 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.718240 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.718283 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.718295 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.718311 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.718321 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:58Z","lastTransitionTime":"2025-11-26T11:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.820188 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.820698 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.820777 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.820845 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.820898 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:58Z","lastTransitionTime":"2025-11-26T11:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.923320 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.923598 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.923671 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.923737 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:58 crc kubenswrapper[4622]: I1126 11:11:58.923808 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:58Z","lastTransitionTime":"2025-11-26T11:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.025951 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.026133 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.026220 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.026291 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.026346 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:59Z","lastTransitionTime":"2025-11-26T11:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.128703 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.128795 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.128809 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.128836 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.128849 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:59Z","lastTransitionTime":"2025-11-26T11:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.231951 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.232003 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.232016 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.232036 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.232051 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:59Z","lastTransitionTime":"2025-11-26T11:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.334716 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.334774 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.334785 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.334808 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.334824 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:59Z","lastTransitionTime":"2025-11-26T11:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.436973 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.437013 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.437022 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.437036 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.437044 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:59Z","lastTransitionTime":"2025-11-26T11:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.539350 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.539408 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.539417 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.539430 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.539438 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:59Z","lastTransitionTime":"2025-11-26T11:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.642458 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.642527 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.642537 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.642550 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.642562 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:59Z","lastTransitionTime":"2025-11-26T11:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.705304 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:11:59 crc kubenswrapper[4622]: E1126 11:11:59.705551 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.745841 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.745923 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.745933 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.745957 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.745979 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:59Z","lastTransitionTime":"2025-11-26T11:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.848833 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.848872 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.848881 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.848898 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.848908 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:59Z","lastTransitionTime":"2025-11-26T11:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.951092 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.951129 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.951140 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.951155 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:11:59 crc kubenswrapper[4622]: I1126 11:11:59.951165 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:11:59Z","lastTransitionTime":"2025-11-26T11:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.052900 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.052991 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.053008 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.053032 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.053044 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:00Z","lastTransitionTime":"2025-11-26T11:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.154822 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.154857 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.154865 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.154877 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.154886 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:00Z","lastTransitionTime":"2025-11-26T11:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.256789 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.256821 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.256830 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.256843 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.256851 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:00Z","lastTransitionTime":"2025-11-26T11:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.359016 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.359053 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.359067 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.359079 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.359087 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:00Z","lastTransitionTime":"2025-11-26T11:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.461479 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.461553 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.461565 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.461582 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.461591 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:00Z","lastTransitionTime":"2025-11-26T11:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.564582 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.564623 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.564632 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.564646 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.564655 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:00Z","lastTransitionTime":"2025-11-26T11:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.667371 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.667420 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.667430 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.667444 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.667456 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:00Z","lastTransitionTime":"2025-11-26T11:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.706089 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.706134 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.706121 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:00 crc kubenswrapper[4622]: E1126 11:12:00.706236 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:00 crc kubenswrapper[4622]: E1126 11:12:00.706562 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:00 crc kubenswrapper[4622]: E1126 11:12:00.706650 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.722791 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc8d37da-b4c0-4fd2-90c2-dd621465f342\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b37ba050446d461fd5427939ec8bcb405444baf508319963d16bb7cde34064cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\
\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d600e503a8217d45e32608d2d2797c2c363cc508c9a1cac96041f639b36453a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55e06241ee313cd2d233ea066fdf6ffe1dada801a053819d6ba293964e863a83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.733448 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10a81b2ab010cc8f0e3a8cbd98d8e7ced4049c55a5ddb087506ae404cc2d244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.742572 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.752256 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vmw42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc4efcee-b872-406d-a694-3572222a8dfc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56dc4e957dfc7b5545751f3cb685d899d2cb1aec53d04cdca0c651a03815f3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:46Z\\\",\\\"message\\\":\\\"2025-11-26T11:11:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e7cb9542-ae4e-40fe-938a-af2c06b83df7\\\\n2025-11-26T11:11:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e7cb9542-ae4e-40fe-938a-af2c06b83df7 to /host/opt/cni/bin/\\\\n2025-11-26T11:11:01Z [verbose] multus-daemon started\\\\n2025-11-26T11:11:01Z [verbose] Readiness Indicator file check\\\\n2025-11-26T11:11:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwxwz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vmw42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.759568 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-z78ph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tvfww\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-z78ph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.770018 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.770048 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.770057 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.770072 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.770082 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:00Z","lastTransitionTime":"2025-11-26T11:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.773399 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f753c9f6-acf7-46b4-9b1f-2feebf1141ab\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20890e2d3ce4ca911c19bcc3ff264d8b79d8378133a44207f2fd821ff281c1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a872081dd8889fc91e16f05548c160c1df5d446146c7992ca200d925fba906\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd302e2d65cf5e9c8dc14874794ffe2bd348ab74db38146326644a6126f5430\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4f6ff0cfb7a872b692f9f3a9ba49ca277b5ad4fa3900cc83a6d6267a2cc895c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05e45137e4df3672e3ada4f1e2eb7b266d5181ff1a2443bbb3beb7ab5393e5d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81e848c92328fe5cf9de2f8c99c2b2f954d492fa43ee5d39af536be8cbcac774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3799062d1f584aed8d00c5dd7d904e9f22eec6e938fe3ead898d30a9627c8695\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca142e2bfe2f1cc45b7866535869b15ae6616ef53ac52c21b66aa0a930a9d2f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.784357 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"862cb43b-4d06-4de6-b69b-3df1965f8ea2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1126 11:10:52.740704 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1126 11:10:52.742464 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2300610594/tls.crt::/tmp/serving-cert-2300610594/tls.key\\\\\\\"\\\\nI1126 11:10:58.222771 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1126 11:10:58.224563 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1126 11:10:58.224579 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1126 11:10:58.224597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1126 11:10:58.224601 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1126 11:10:58.231038 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1126 11:10:58.231064 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1126 11:10:58.231072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1126 11:10:58.231075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1126 11:10:58.231078 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1126 11:10:58.231081 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1126 11:10:58.232356 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1126 11:10:58.233671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.792711 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d93620663371c83fd530bc65d78ef82da0c2f1c671076c873b109829b5538ca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.800447 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-69txw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a91aff9-4cb8-4cab-acef-c37fbfa011ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://beb664f9dd3c4f81af12e995e38598d49cf93833ff5dd835ceb2e42b74fcabf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9qzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-69txw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.808101 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.815423 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4b2dbdb-8e61-40e9-86ae-3dba474c215b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a40aa751fcf2134017c60a7511d5311646ff721807efdb224b28508800583f4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fq56j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k565w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.824150 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a50c801-f165-4548-b4b3-f4218c4bf866\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcfd0d3c7123cf20a72a99f6eb171bfde6b6930b0725010d05dd6c03c9838b7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d72500572c691e375fc8eae060f7f2296fcae5f47b4ada9e1a6a6cb1e2eb
417a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aa986cd95cb36fa13517fe871cfb3d701525d13c901e2cd0349b2e67b3a28d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458aec06b20cec936f254f90207df8ea6d629395ec6441d1ca5d2e10a525160a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:10:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:10:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:10:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.833862 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68156c18-d363-47be-990d-c722986bdfae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64a18b5a9caeb1fc29015d9fff8525d0b3b73f35c773e21191bf69f53012f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ebc82edd0aae00656facf92e91ed1165d9b7131910edad8f72ad023e7ec0df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rqdtq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8g4tm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 
11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.841748 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cqclz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba36f366-6db7-4902-90b4-6630fa4b1602\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af89a1c3ec6764f8e50b8e952e8d260172c7e0d4562802ab788d03d8ee5d3695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxtls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cqclz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.850157 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://655fe72344316759d8ab334f8cad74a0dbe4d79e6ddff74220dd5f6f2f5caf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9033f981921135a298e155e819f4c811206ef94e0c6d966e1e99a4e69874b420\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:10:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.857969 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-26T11:10:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.867860 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"673ce8e8-772e-4a53-9cc4-7cd647a16d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0def82ab6bfb2fcd8c000727c26138fbdd5e067021e932d4b7e2b1919e501e8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30760ac37146cc73003e396d6daee09858c895a2107387bd750bab0ff6ed0c1f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a2f4d8e68125f8ab1044ae5631cc538bef0cfb7e20551c96cda5af71b9a62e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ba7cfcf685717b71c19b80e114e94d15cfbb9d5be77d41b6a099ea86e90513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159b600c07612e450c2a9154d27b621c355073f23a8c723e77f5950555ccc7a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0a361ad6ac6ceaae00f02f76c02e224517677e1eb4fc53c0652799ddeb3248\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f5946e8610f1d7a271f2d4c8765be279c7328f026520c779535b680d4db7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9wxnt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xf2zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.871721 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.871771 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:00 crc 
kubenswrapper[4622]: I1126 11:12:00.871783 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.871798 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.871809 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:00Z","lastTransitionTime":"2025-11-26T11:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.881249 4622 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cf9b509-1f95-4119-a348-92cba5fc8bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-26T11:11:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a1ac35e79b440c5017b730b338cf4de365eba6
73ab7b525e0b6694acb3c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-26T11:11:53Z\\\",\\\"message\\\":\\\"lse, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.149\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1126 11:11:53.464731 6688 obj_retry.go:285] Attempting retry of *v1.Pod openshift-multus/network-metrics-daemon-z78ph before timer (time: 2025-11-26 11:11:54.927004784 +0000 UTC m=+2.049233149): skip\\\\nF1126 11:11:53.464759 6688 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:11:53Z is after 2025-08-24T17:21:41Z]\\\\nI1126 11:11:53.464\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-26T11:11:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-26T11:11:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-26T11:11:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49744\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-26T11:11:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-qx5dc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:00Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.973747 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.973778 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.973786 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.973803 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:00 crc kubenswrapper[4622]: I1126 11:12:00.973812 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:00Z","lastTransitionTime":"2025-11-26T11:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.076088 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.076140 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.076149 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.076165 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.076174 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:01Z","lastTransitionTime":"2025-11-26T11:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.178776 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.178820 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.178830 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.178849 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.178859 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:01Z","lastTransitionTime":"2025-11-26T11:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.281301 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.281346 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.281354 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.281369 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.281378 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:01Z","lastTransitionTime":"2025-11-26T11:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.383113 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.383151 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.383160 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.383175 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.383184 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:01Z","lastTransitionTime":"2025-11-26T11:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.485186 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.485228 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.485238 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.485253 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.485262 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:01Z","lastTransitionTime":"2025-11-26T11:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.587341 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.587378 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.587399 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.587412 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.587421 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:01Z","lastTransitionTime":"2025-11-26T11:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.689752 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.689813 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.689823 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.689838 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.689846 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:01Z","lastTransitionTime":"2025-11-26T11:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.705205 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:01 crc kubenswrapper[4622]: E1126 11:12:01.705320 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.792151 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.792194 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.792202 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.792212 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.792219 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:01Z","lastTransitionTime":"2025-11-26T11:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.893869 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.893900 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.893907 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.893919 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.893926 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:01Z","lastTransitionTime":"2025-11-26T11:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.996053 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.996089 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.996096 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.996107 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:01 crc kubenswrapper[4622]: I1126 11:12:01.996117 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:01Z","lastTransitionTime":"2025-11-26T11:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.098163 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.098196 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.098204 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.098214 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.098220 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:02Z","lastTransitionTime":"2025-11-26T11:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.199776 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.199806 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.199814 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.199824 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.199830 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:02Z","lastTransitionTime":"2025-11-26T11:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.301334 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.301367 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.301376 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.301399 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.301408 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:02Z","lastTransitionTime":"2025-11-26T11:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.403814 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.403847 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.403857 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.403869 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.403877 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:02Z","lastTransitionTime":"2025-11-26T11:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.506420 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.506455 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.506463 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.506477 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.506486 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:02Z","lastTransitionTime":"2025-11-26T11:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.608579 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.608637 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.608646 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.608659 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.608668 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:02Z","lastTransitionTime":"2025-11-26T11:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.609160 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.609275 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.609314 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-11-26 11:13:06.609293479 +0000 UTC m=+146.200505002 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.609343 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.609372 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.609376 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.609410 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.609419 4622 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.609433 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.609454 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-26 11:13:06.609442471 +0000 UTC m=+146.200653993 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.609457 4622 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.609493 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-26 11:13:06.609483819 +0000 UTC m=+146.200695341 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.609492 4622 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.609524 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.609536 4622 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.609545 4622 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.609547 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-26 11:13:06.609539995 +0000 UTC m=+146.200751517 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.609577 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-11-26 11:13:06.609570883 +0000 UTC m=+146.200782405 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.705723 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.705826 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.705860 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.705912 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.706032 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:02 crc kubenswrapper[4622]: E1126 11:12:02.706108 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.709986 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.710016 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.710024 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.710037 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.710045 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:02Z","lastTransitionTime":"2025-11-26T11:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.811705 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.811736 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.811745 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.811758 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.811766 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:02Z","lastTransitionTime":"2025-11-26T11:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.913565 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.913601 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.913609 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.913622 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:02 crc kubenswrapper[4622]: I1126 11:12:02.913631 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:02Z","lastTransitionTime":"2025-11-26T11:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.015743 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.015780 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.015792 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.015806 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.015815 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:03Z","lastTransitionTime":"2025-11-26T11:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.117612 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.117642 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.117651 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.117681 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.117691 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:03Z","lastTransitionTime":"2025-11-26T11:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.220866 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.220897 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.220905 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.220917 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.220925 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:03Z","lastTransitionTime":"2025-11-26T11:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.322912 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.322942 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.322951 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.322962 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.322970 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:03Z","lastTransitionTime":"2025-11-26T11:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.425177 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.425213 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.425221 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.425234 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.425242 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:03Z","lastTransitionTime":"2025-11-26T11:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.527522 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.527551 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.527559 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.527571 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.527581 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:03Z","lastTransitionTime":"2025-11-26T11:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.555549 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.555582 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.555592 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.555604 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.555614 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:03Z","lastTransitionTime":"2025-11-26T11:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:03 crc kubenswrapper[4622]: E1126 11:12:03.564870 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.567653 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.567677 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.567685 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.567695 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.567702 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:03Z","lastTransitionTime":"2025-11-26T11:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:03 crc kubenswrapper[4622]: E1126 11:12:03.575724 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.577755 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.577789 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.577798 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.577810 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.577819 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:03Z","lastTransitionTime":"2025-11-26T11:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:03 crc kubenswrapper[4622]: E1126 11:12:03.585969 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.588089 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.588120 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.588129 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.588140 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.588148 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:03Z","lastTransitionTime":"2025-11-26T11:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:03 crc kubenswrapper[4622]: E1126 11:12:03.595775 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.597894 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.597921 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.597930 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.597940 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.597948 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:03Z","lastTransitionTime":"2025-11-26T11:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:03 crc kubenswrapper[4622]: E1126 11:12:03.605575 4622 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-26T11:12:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c657949b-a8f1-4d3e-9908-1148dfba75d3\\\",\\\"systemUUID\\\":\\\"cec42615-21c9-4929-b427-d0d60aa981a6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-26T11:12:03Z is after 2025-08-24T17:21:41Z" Nov 26 11:12:03 crc kubenswrapper[4622]: E1126 11:12:03.605707 4622 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.628586 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.628613 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.628637 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.628652 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.628660 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:03Z","lastTransitionTime":"2025-11-26T11:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.704989 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:03 crc kubenswrapper[4622]: E1126 11:12:03.705095 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.730129 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.730157 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.730166 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.730176 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.730183 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:03Z","lastTransitionTime":"2025-11-26T11:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.832576 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.832608 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.832615 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.832626 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.832634 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:03Z","lastTransitionTime":"2025-11-26T11:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.934347 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.934393 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.934402 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.934416 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:03 crc kubenswrapper[4622]: I1126 11:12:03.934427 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:03Z","lastTransitionTime":"2025-11-26T11:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.036471 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.036541 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.036551 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.036561 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.036569 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:04Z","lastTransitionTime":"2025-11-26T11:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.138200 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.138237 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.138245 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.138255 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.138262 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:04Z","lastTransitionTime":"2025-11-26T11:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.240400 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.240432 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.240440 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.240450 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.240458 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:04Z","lastTransitionTime":"2025-11-26T11:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.342627 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.342653 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.342661 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.342671 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.342678 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:04Z","lastTransitionTime":"2025-11-26T11:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.444649 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.444684 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.444693 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.444706 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.444716 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:04Z","lastTransitionTime":"2025-11-26T11:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.546586 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.546687 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.546696 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.546709 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.546718 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:04Z","lastTransitionTime":"2025-11-26T11:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.648592 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.648625 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.648633 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.648644 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.648652 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:04Z","lastTransitionTime":"2025-11-26T11:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.705125 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.705177 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.705192 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:04 crc kubenswrapper[4622]: E1126 11:12:04.705281 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:04 crc kubenswrapper[4622]: E1126 11:12:04.705451 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:04 crc kubenswrapper[4622]: E1126 11:12:04.705557 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.750200 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.750225 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.750233 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.750244 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.750252 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:04Z","lastTransitionTime":"2025-11-26T11:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.851660 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.851687 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.851695 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.851706 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.851715 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:04Z","lastTransitionTime":"2025-11-26T11:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.954230 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.954262 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.954270 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.954284 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:04 crc kubenswrapper[4622]: I1126 11:12:04.954292 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:04Z","lastTransitionTime":"2025-11-26T11:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.056021 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.056047 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.056054 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.056065 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.056073 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:05Z","lastTransitionTime":"2025-11-26T11:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.157876 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.157913 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.157923 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.157935 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.157944 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:05Z","lastTransitionTime":"2025-11-26T11:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.259951 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.260021 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.260034 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.260046 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.260055 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:05Z","lastTransitionTime":"2025-11-26T11:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.361981 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.362015 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.362024 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.362036 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.362046 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:05Z","lastTransitionTime":"2025-11-26T11:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.463995 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.464024 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.464049 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.464059 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.464067 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:05Z","lastTransitionTime":"2025-11-26T11:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.565450 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.565479 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.565488 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.565498 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.565525 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:05Z","lastTransitionTime":"2025-11-26T11:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.667111 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.667144 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.667153 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.667163 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.667171 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:05Z","lastTransitionTime":"2025-11-26T11:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.705485 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:05 crc kubenswrapper[4622]: E1126 11:12:05.705709 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.768133 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.768153 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.768162 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.768172 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.768180 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:05Z","lastTransitionTime":"2025-11-26T11:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.869450 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.869478 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.869485 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.869524 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.869533 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:05Z","lastTransitionTime":"2025-11-26T11:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.970930 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.970968 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.970977 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.970989 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:05 crc kubenswrapper[4622]: I1126 11:12:05.971000 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:05Z","lastTransitionTime":"2025-11-26T11:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.073004 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.073034 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.073067 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.073080 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.073087 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:06Z","lastTransitionTime":"2025-11-26T11:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.174574 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.174616 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.174624 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.174634 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.174642 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:06Z","lastTransitionTime":"2025-11-26T11:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.276068 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.276110 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.276118 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.276132 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.276140 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:06Z","lastTransitionTime":"2025-11-26T11:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.378402 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.378428 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.378436 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.378448 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.378457 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:06Z","lastTransitionTime":"2025-11-26T11:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.480671 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.480711 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.480718 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.480734 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.480743 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:06Z","lastTransitionTime":"2025-11-26T11:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.582176 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.582212 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.582220 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.582233 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.582242 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:06Z","lastTransitionTime":"2025-11-26T11:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.683354 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.683376 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.683399 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.683408 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.683416 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:06Z","lastTransitionTime":"2025-11-26T11:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.705960 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.705995 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:06 crc kubenswrapper[4622]: E1126 11:12:06.706076 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.706104 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:06 crc kubenswrapper[4622]: E1126 11:12:06.706279 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:06 crc kubenswrapper[4622]: E1126 11:12:06.706373 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.785321 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.785344 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.785351 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.785376 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.785395 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:06Z","lastTransitionTime":"2025-11-26T11:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.886849 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.886904 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.886914 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.886923 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.886929 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:06Z","lastTransitionTime":"2025-11-26T11:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.988659 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.988688 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.988697 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.988709 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:06 crc kubenswrapper[4622]: I1126 11:12:06.988718 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:06Z","lastTransitionTime":"2025-11-26T11:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.090291 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.090321 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.090328 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.090337 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.090344 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:07Z","lastTransitionTime":"2025-11-26T11:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.191752 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.191777 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.191786 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.191814 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.191822 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:07Z","lastTransitionTime":"2025-11-26T11:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.293268 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.293293 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.293300 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.293309 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.293316 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:07Z","lastTransitionTime":"2025-11-26T11:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.395747 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.395792 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.395801 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.395811 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.395818 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:07Z","lastTransitionTime":"2025-11-26T11:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.497561 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.497582 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.497590 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.497609 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.497617 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:07Z","lastTransitionTime":"2025-11-26T11:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.599836 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.599861 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.599868 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.599879 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.599885 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:07Z","lastTransitionTime":"2025-11-26T11:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.701230 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.701254 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.701263 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.701274 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.701283 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:07Z","lastTransitionTime":"2025-11-26T11:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.704902 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:07 crc kubenswrapper[4622]: E1126 11:12:07.704982 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.705770 4622 scope.go:117] "RemoveContainer" containerID="48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818" Nov 26 11:12:07 crc kubenswrapper[4622]: E1126 11:12:07.705918 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.803300 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.803319 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.803328 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.803337 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.803343 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:07Z","lastTransitionTime":"2025-11-26T11:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.905207 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.905232 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.905240 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.905249 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:07 crc kubenswrapper[4622]: I1126 11:12:07.905257 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:07Z","lastTransitionTime":"2025-11-26T11:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.006990 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.007016 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.007025 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.007034 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.007041 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:08Z","lastTransitionTime":"2025-11-26T11:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.108909 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.108934 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.108943 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.108953 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.108960 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:08Z","lastTransitionTime":"2025-11-26T11:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.210555 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.210598 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.210608 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.210621 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.210632 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:08Z","lastTransitionTime":"2025-11-26T11:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.312366 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.312429 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.312440 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.312452 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.312462 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:08Z","lastTransitionTime":"2025-11-26T11:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.414953 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.414998 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.415006 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.415015 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.415022 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:08Z","lastTransitionTime":"2025-11-26T11:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.516824 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.516850 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.516858 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.516907 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.516916 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:08Z","lastTransitionTime":"2025-11-26T11:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.618842 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.618965 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.619053 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.619142 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.619319 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:08Z","lastTransitionTime":"2025-11-26T11:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.705970 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.705991 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:08 crc kubenswrapper[4622]: E1126 11:12:08.706276 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:08 crc kubenswrapper[4622]: E1126 11:12:08.706295 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.706108 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:08 crc kubenswrapper[4622]: E1126 11:12:08.706351 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.720887 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.720913 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.720921 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.720931 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.720938 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:08Z","lastTransitionTime":"2025-11-26T11:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.822674 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.822707 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.822716 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.822727 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.822735 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:08Z","lastTransitionTime":"2025-11-26T11:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.924188 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.924213 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.924221 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.924231 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:08 crc kubenswrapper[4622]: I1126 11:12:08.924239 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:08Z","lastTransitionTime":"2025-11-26T11:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.025921 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.025949 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.025956 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.025966 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.025974 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:09Z","lastTransitionTime":"2025-11-26T11:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.128056 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.128091 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.128098 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.128108 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.128116 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:09Z","lastTransitionTime":"2025-11-26T11:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.230352 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.230392 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.230401 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.230411 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.230418 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:09Z","lastTransitionTime":"2025-11-26T11:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.331814 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.331850 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.331861 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.331874 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.331882 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:09Z","lastTransitionTime":"2025-11-26T11:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.433209 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.433233 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.433241 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.433251 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.433258 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:09Z","lastTransitionTime":"2025-11-26T11:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.534938 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.534966 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.534974 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.534985 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.534994 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:09Z","lastTransitionTime":"2025-11-26T11:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.636913 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.636939 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.636968 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.636981 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.636989 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:09Z","lastTransitionTime":"2025-11-26T11:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.705901 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:09 crc kubenswrapper[4622]: E1126 11:12:09.705991 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.713405 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.738581 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.738671 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.738680 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.738691 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.738698 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:09Z","lastTransitionTime":"2025-11-26T11:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.840310 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.840338 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.840347 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.840357 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.840365 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:09Z","lastTransitionTime":"2025-11-26T11:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.941784 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.941813 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.941822 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.941831 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:09 crc kubenswrapper[4622]: I1126 11:12:09.941838 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:09Z","lastTransitionTime":"2025-11-26T11:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.043299 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.043322 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.043330 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.043339 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.043347 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:10Z","lastTransitionTime":"2025-11-26T11:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.145466 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.145494 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.145521 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.145532 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.145539 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:10Z","lastTransitionTime":"2025-11-26T11:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.247320 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.247342 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.247350 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.247363 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.247370 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:10Z","lastTransitionTime":"2025-11-26T11:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.349522 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.349584 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.349597 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.349623 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.349644 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:10Z","lastTransitionTime":"2025-11-26T11:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.452618 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.452659 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.452667 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.452685 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.452695 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:10Z","lastTransitionTime":"2025-11-26T11:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.554489 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.554544 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.554553 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.554565 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.554574 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:10Z","lastTransitionTime":"2025-11-26T11:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.656804 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.656887 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.656899 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.656933 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.656948 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:10Z","lastTransitionTime":"2025-11-26T11:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.705357 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:10 crc kubenswrapper[4622]: E1126 11:12:10.705632 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.705695 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.705693 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:10 crc kubenswrapper[4622]: E1126 11:12:10.705785 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:10 crc kubenswrapper[4622]: E1126 11:12:10.705834 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.720210 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-cqclz" podStartSLOduration=71.720193758 podStartE2EDuration="1m11.720193758s" podCreationTimestamp="2025-11-26 11:10:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:10.720008849 +0000 UTC m=+90.311220361" watchObservedRunningTime="2025-11-26 11:12:10.720193758 +0000 UTC m=+90.311405279" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.738707 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8g4tm" podStartSLOduration=71.738406599 podStartE2EDuration="1m11.738406599s" podCreationTimestamp="2025-11-26 11:10:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:10.737597765 +0000 UTC m=+90.328809286" watchObservedRunningTime="2025-11-26 11:12:10.738406599 +0000 UTC m=+90.329618121" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.759109 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.759141 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.759149 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.759163 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.759171 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:10Z","lastTransitionTime":"2025-11-26T11:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.765101 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-xf2zs" podStartSLOduration=71.765088465 podStartE2EDuration="1m11.765088465s" podCreationTimestamp="2025-11-26 11:10:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:10.75009943 +0000 UTC m=+90.341310952" watchObservedRunningTime="2025-11-26 11:12:10.765088465 +0000 UTC m=+90.356299987" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.782689 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=1.782660938 podStartE2EDuration="1.782660938s" podCreationTimestamp="2025-11-26 11:12:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:10.771627161 +0000 UTC m=+90.362838683" watchObservedRunningTime="2025-11-26 11:12:10.782660938 +0000 UTC m=+90.373872461" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.815338 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-vmw42" podStartSLOduration=71.815309683 podStartE2EDuration="1m11.815309683s" podCreationTimestamp="2025-11-26 11:10:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:10.814374139 +0000 UTC m=+90.405585661" watchObservedRunningTime="2025-11-26 11:12:10.815309683 +0000 UTC m=+90.406521205" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.861413 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.861452 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.861461 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.861477 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.861486 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:10Z","lastTransitionTime":"2025-11-26T11:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.867893 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=70.867871392 podStartE2EDuration="1m10.867871392s" podCreationTimestamp="2025-11-26 11:11:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:10.864254004 +0000 UTC m=+90.455465526" watchObservedRunningTime="2025-11-26 11:12:10.867871392 +0000 UTC m=+90.459082905" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.878612 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=72.878593613 podStartE2EDuration="1m12.878593613s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:10.877841136 +0000 UTC m=+90.469052658" watchObservedRunningTime="2025-11-26 11:12:10.878593613 +0000 UTC m=+90.469805135" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.894364 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-69txw" podStartSLOduration=72.89434724 podStartE2EDuration="1m12.89434724s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:10.893786372 +0000 UTC m=+90.484997894" watchObservedRunningTime="2025-11-26 11:12:10.89434724 +0000 UTC m=+90.485558762" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.910681 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podStartSLOduration=71.910669878 podStartE2EDuration="1m11.910669878s" podCreationTimestamp="2025-11-26 11:10:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:10.910302705 +0000 UTC m=+90.501514228" watchObservedRunningTime="2025-11-26 11:12:10.910669878 +0000 UTC m=+90.501881400" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.928895 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=72.928877139 podStartE2EDuration="1m12.928877139s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:10.928454522 +0000 UTC m=+90.519666074" watchObservedRunningTime="2025-11-26 11:12:10.928877139 +0000 UTC m=+90.520088651" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.929066 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=41.929060995 podStartE2EDuration="41.929060995s" podCreationTimestamp="2025-11-26 11:11:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:10.918623842 +0000 UTC m=+90.509835363" watchObservedRunningTime="2025-11-26 11:12:10.929060995 +0000 UTC m=+90.520272517" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 
11:12:10.964553 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.964598 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.964607 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.964622 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:10 crc kubenswrapper[4622]: I1126 11:12:10.964633 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:10Z","lastTransitionTime":"2025-11-26T11:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.066654 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.066691 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.066699 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.066712 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.066722 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:11Z","lastTransitionTime":"2025-11-26T11:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.168787 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.168820 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.168828 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.168840 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.168849 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:11Z","lastTransitionTime":"2025-11-26T11:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.270678 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.270699 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.270706 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.270715 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.270722 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:11Z","lastTransitionTime":"2025-11-26T11:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.372597 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.372631 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.372640 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.372650 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.372658 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:11Z","lastTransitionTime":"2025-11-26T11:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.473975 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.473997 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.474007 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.474016 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.474024 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:11Z","lastTransitionTime":"2025-11-26T11:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.576037 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.576067 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.576075 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.576085 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.576093 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:11Z","lastTransitionTime":"2025-11-26T11:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.678190 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.678223 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.678233 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.678244 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.678252 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:11Z","lastTransitionTime":"2025-11-26T11:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.705064 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:11 crc kubenswrapper[4622]: E1126 11:12:11.705144 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.779905 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.779932 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.779940 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.779949 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.779957 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:11Z","lastTransitionTime":"2025-11-26T11:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.882058 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.882088 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.882097 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.882108 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.882118 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:11Z","lastTransitionTime":"2025-11-26T11:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.983978 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.984005 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.984013 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.984024 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:11 crc kubenswrapper[4622]: I1126 11:12:11.984031 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:11Z","lastTransitionTime":"2025-11-26T11:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.085346 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.085387 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.085396 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.085407 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.085414 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:12Z","lastTransitionTime":"2025-11-26T11:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.187709 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.187739 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.187747 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.187759 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.187767 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:12Z","lastTransitionTime":"2025-11-26T11:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.289208 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.289239 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.289247 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.289258 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.289266 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:12Z","lastTransitionTime":"2025-11-26T11:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.391125 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.391157 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.391166 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.391180 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.391188 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:12Z","lastTransitionTime":"2025-11-26T11:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.492914 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.492940 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.492949 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.492961 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.492969 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:12Z","lastTransitionTime":"2025-11-26T11:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.594867 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.594908 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.594916 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.594925 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.594935 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:12Z","lastTransitionTime":"2025-11-26T11:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.697077 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.697108 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.697123 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.697134 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.697142 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:12Z","lastTransitionTime":"2025-11-26T11:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.705385 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.705413 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:12 crc kubenswrapper[4622]: E1126 11:12:12.705466 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.705394 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:12 crc kubenswrapper[4622]: E1126 11:12:12.705550 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:12 crc kubenswrapper[4622]: E1126 11:12:12.705660 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.799004 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.799031 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.799039 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.799052 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.799061 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:12Z","lastTransitionTime":"2025-11-26T11:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.900991 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.901021 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.901046 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.901058 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:12 crc kubenswrapper[4622]: I1126 11:12:12.901066 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:12Z","lastTransitionTime":"2025-11-26T11:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.003079 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.003111 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.003119 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.003132 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.003140 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:13Z","lastTransitionTime":"2025-11-26T11:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.105151 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.105190 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.105199 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.105213 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.105227 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:13Z","lastTransitionTime":"2025-11-26T11:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.207476 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.207541 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.207551 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.207571 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.207583 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:13Z","lastTransitionTime":"2025-11-26T11:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.309576 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.309636 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.309647 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.309664 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.309673 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:13Z","lastTransitionTime":"2025-11-26T11:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.411865 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.411920 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.411931 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.411946 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.411957 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:13Z","lastTransitionTime":"2025-11-26T11:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.514258 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.514303 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.514314 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.514332 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.514344 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:13Z","lastTransitionTime":"2025-11-26T11:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.617476 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.617534 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.617547 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.617561 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.617571 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:13Z","lastTransitionTime":"2025-11-26T11:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.705231 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:13 crc kubenswrapper[4622]: E1126 11:12:13.705418 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.709053 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.709096 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.709105 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.709120 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.709129 4622 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-26T11:12:13Z","lastTransitionTime":"2025-11-26T11:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.742200 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg"] Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.742594 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.744072 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.744137 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.744627 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.744854 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.907312 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-7hkcg\" (UID: \"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.907396 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45-service-ca\") pod \"cluster-version-operator-5c965bbfc6-7hkcg\" (UID: \"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.907415 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-7hkcg\" (UID: \"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.907430 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-7hkcg\" (UID: \"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:13 crc kubenswrapper[4622]: I1126 11:12:13.907451 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-7hkcg\" (UID: \"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:14 crc kubenswrapper[4622]: I1126 11:12:14.008288 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45-service-ca\") pod \"cluster-version-operator-5c965bbfc6-7hkcg\" (UID: \"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:14 crc 
kubenswrapper[4622]: I1126 11:12:14.008323 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-7hkcg\" (UID: \"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:14 crc kubenswrapper[4622]: I1126 11:12:14.008342 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-7hkcg\" (UID: \"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:14 crc kubenswrapper[4622]: I1126 11:12:14.008364 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-7hkcg\" (UID: \"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:14 crc kubenswrapper[4622]: I1126 11:12:14.008402 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-7hkcg\" (UID: \"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:14 crc kubenswrapper[4622]: I1126 11:12:14.008720 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-7hkcg\" (UID: \"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:14 crc kubenswrapper[4622]: I1126 11:12:14.008720 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-7hkcg\" (UID: \"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:14 crc kubenswrapper[4622]: I1126 11:12:14.010582 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45-service-ca\") pod \"cluster-version-operator-5c965bbfc6-7hkcg\" (UID: \"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:14 crc kubenswrapper[4622]: I1126 11:12:14.013865 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-7hkcg\" (UID: \"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:14 crc kubenswrapper[4622]: I1126 11:12:14.021310 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-7hkcg\" (UID: \"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:14 crc kubenswrapper[4622]: I1126 11:12:14.055199 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" Nov 26 11:12:14 crc kubenswrapper[4622]: I1126 11:12:14.705629 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:14 crc kubenswrapper[4622]: I1126 11:12:14.705685 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:14 crc kubenswrapper[4622]: I1126 11:12:14.705701 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:14 crc kubenswrapper[4622]: E1126 11:12:14.706693 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:14 crc kubenswrapper[4622]: E1126 11:12:14.706769 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:14 crc kubenswrapper[4622]: E1126 11:12:14.706736 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:15 crc kubenswrapper[4622]: I1126 11:12:15.056080 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" event={"ID":"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45","Type":"ContainerStarted","Data":"216c395738e922acf24a4f834933fadb2619155c8a4beca10085eba3b605f11a"} Nov 26 11:12:15 crc kubenswrapper[4622]: I1126 11:12:15.056128 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" event={"ID":"9ffe4f8e-53cb-4c28-9ea0-f4ba6b501b45","Type":"ContainerStarted","Data":"53b8800e8f389f91447c894154222956828a967121b96f4545e7580949eea4ec"} Nov 26 11:12:15 crc kubenswrapper[4622]: I1126 11:12:15.705644 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:15 crc kubenswrapper[4622]: E1126 11:12:15.705784 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:16 crc kubenswrapper[4622]: I1126 11:12:16.229210 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs\") pod \"network-metrics-daemon-z78ph\" (UID: \"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\") " pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:16 crc kubenswrapper[4622]: E1126 11:12:16.229414 4622 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 26 11:12:16 crc kubenswrapper[4622]: E1126 11:12:16.229481 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs podName:f23a93f9-15cf-4dfd-802d-4b6bd04bbf81 nodeName:}" failed. No retries permitted until 2025-11-26 11:13:20.229465758 +0000 UTC m=+159.820677290 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs") pod "network-metrics-daemon-z78ph" (UID: "f23a93f9-15cf-4dfd-802d-4b6bd04bbf81") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 26 11:12:16 crc kubenswrapper[4622]: I1126 11:12:16.705427 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:16 crc kubenswrapper[4622]: I1126 11:12:16.705526 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:16 crc kubenswrapper[4622]: I1126 11:12:16.705650 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:16 crc kubenswrapper[4622]: E1126 11:12:16.705819 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:16 crc kubenswrapper[4622]: E1126 11:12:16.705911 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:16 crc kubenswrapper[4622]: E1126 11:12:16.705996 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:17 crc kubenswrapper[4622]: I1126 11:12:17.705101 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:17 crc kubenswrapper[4622]: E1126 11:12:17.705214 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:18 crc kubenswrapper[4622]: I1126 11:12:18.705591 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:18 crc kubenswrapper[4622]: I1126 11:12:18.705697 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:18 crc kubenswrapper[4622]: I1126 11:12:18.705795 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:18 crc kubenswrapper[4622]: E1126 11:12:18.705908 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:18 crc kubenswrapper[4622]: E1126 11:12:18.706001 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:18 crc kubenswrapper[4622]: E1126 11:12:18.706099 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:19 crc kubenswrapper[4622]: I1126 11:12:19.705010 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:19 crc kubenswrapper[4622]: E1126 11:12:19.705155 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:20 crc kubenswrapper[4622]: I1126 11:12:20.704985 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:20 crc kubenswrapper[4622]: I1126 11:12:20.704985 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:20 crc kubenswrapper[4622]: I1126 11:12:20.705033 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:20 crc kubenswrapper[4622]: E1126 11:12:20.705784 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:20 crc kubenswrapper[4622]: E1126 11:12:20.705900 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:20 crc kubenswrapper[4622]: E1126 11:12:20.706164 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:20 crc kubenswrapper[4622]: I1126 11:12:20.706398 4622 scope.go:117] "RemoveContainer" containerID="48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818" Nov 26 11:12:20 crc kubenswrapper[4622]: E1126 11:12:20.706535 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-qx5dc_openshift-ovn-kubernetes(9cf9b509-1f95-4119-a348-92cba5fc8bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" Nov 26 11:12:21 crc kubenswrapper[4622]: I1126 11:12:21.705438 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:21 crc kubenswrapper[4622]: E1126 11:12:21.705683 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:22 crc kubenswrapper[4622]: I1126 11:12:22.705220 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:22 crc kubenswrapper[4622]: I1126 11:12:22.705239 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:22 crc kubenswrapper[4622]: I1126 11:12:22.705245 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:22 crc kubenswrapper[4622]: E1126 11:12:22.705528 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:22 crc kubenswrapper[4622]: E1126 11:12:22.705742 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:22 crc kubenswrapper[4622]: E1126 11:12:22.705839 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:23 crc kubenswrapper[4622]: I1126 11:12:23.705615 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:23 crc kubenswrapper[4622]: E1126 11:12:23.705709 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:24 crc kubenswrapper[4622]: I1126 11:12:24.705462 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:24 crc kubenswrapper[4622]: I1126 11:12:24.705554 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:24 crc kubenswrapper[4622]: E1126 11:12:24.705579 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:24 crc kubenswrapper[4622]: E1126 11:12:24.705655 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:24 crc kubenswrapper[4622]: I1126 11:12:24.705708 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:24 crc kubenswrapper[4622]: E1126 11:12:24.705751 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:25 crc kubenswrapper[4622]: I1126 11:12:25.705891 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:25 crc kubenswrapper[4622]: E1126 11:12:25.705981 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:26 crc kubenswrapper[4622]: I1126 11:12:26.705326 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:26 crc kubenswrapper[4622]: I1126 11:12:26.705378 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:26 crc kubenswrapper[4622]: I1126 11:12:26.705406 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:26 crc kubenswrapper[4622]: E1126 11:12:26.705459 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:26 crc kubenswrapper[4622]: E1126 11:12:26.705529 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:26 crc kubenswrapper[4622]: E1126 11:12:26.705572 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:27 crc kubenswrapper[4622]: I1126 11:12:27.705323 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:27 crc kubenswrapper[4622]: E1126 11:12:27.705933 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:28 crc kubenswrapper[4622]: I1126 11:12:28.705726 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:28 crc kubenswrapper[4622]: E1126 11:12:28.705833 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:28 crc kubenswrapper[4622]: I1126 11:12:28.705868 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:28 crc kubenswrapper[4622]: I1126 11:12:28.705956 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:28 crc kubenswrapper[4622]: E1126 11:12:28.705963 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:28 crc kubenswrapper[4622]: E1126 11:12:28.706083 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:29 crc kubenswrapper[4622]: I1126 11:12:29.705775 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:29 crc kubenswrapper[4622]: E1126 11:12:29.705878 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:30 crc kubenswrapper[4622]: I1126 11:12:30.705971 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:30 crc kubenswrapper[4622]: I1126 11:12:30.705968 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:30 crc kubenswrapper[4622]: I1126 11:12:30.706015 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:30 crc kubenswrapper[4622]: E1126 11:12:30.706982 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:30 crc kubenswrapper[4622]: E1126 11:12:30.707026 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:30 crc kubenswrapper[4622]: E1126 11:12:30.707069 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:31 crc kubenswrapper[4622]: I1126 11:12:31.705344 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:31 crc kubenswrapper[4622]: E1126 11:12:31.705457 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:32 crc kubenswrapper[4622]: I1126 11:12:32.705480 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:32 crc kubenswrapper[4622]: E1126 11:12:32.705797 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:32 crc kubenswrapper[4622]: I1126 11:12:32.705532 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:32 crc kubenswrapper[4622]: I1126 11:12:32.705532 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:32 crc kubenswrapper[4622]: E1126 11:12:32.705878 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:32 crc kubenswrapper[4622]: E1126 11:12:32.705936 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:33 crc kubenswrapper[4622]: I1126 11:12:33.097176 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vmw42_fc4efcee-b872-406d-a694-3572222a8dfc/kube-multus/1.log" Nov 26 11:12:33 crc kubenswrapper[4622]: I1126 11:12:33.097663 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vmw42_fc4efcee-b872-406d-a694-3572222a8dfc/kube-multus/0.log" Nov 26 11:12:33 crc kubenswrapper[4622]: I1126 11:12:33.097712 4622 generic.go:334] "Generic (PLEG): container finished" podID="fc4efcee-b872-406d-a694-3572222a8dfc" containerID="56dc4e957dfc7b5545751f3cb685d899d2cb1aec53d04cdca0c651a03815f3a0" exitCode=1 Nov 26 11:12:33 crc kubenswrapper[4622]: I1126 11:12:33.097739 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vmw42" event={"ID":"fc4efcee-b872-406d-a694-3572222a8dfc","Type":"ContainerDied","Data":"56dc4e957dfc7b5545751f3cb685d899d2cb1aec53d04cdca0c651a03815f3a0"} Nov 26 11:12:33 crc kubenswrapper[4622]: I1126 11:12:33.097770 4622 scope.go:117] "RemoveContainer" containerID="4a25bfee2aca57433d2cb4c20781cfaa4462f3b5997a1830daae659c71f3916c" Nov 26 11:12:33 crc kubenswrapper[4622]: I1126 11:12:33.098048 4622 scope.go:117] "RemoveContainer" containerID="56dc4e957dfc7b5545751f3cb685d899d2cb1aec53d04cdca0c651a03815f3a0" Nov 26 11:12:33 crc kubenswrapper[4622]: E1126 11:12:33.098180 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-vmw42_openshift-multus(fc4efcee-b872-406d-a694-3572222a8dfc)\"" pod="openshift-multus/multus-vmw42" podUID="fc4efcee-b872-406d-a694-3572222a8dfc" Nov 26 11:12:33 crc kubenswrapper[4622]: I1126 11:12:33.111335 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7hkcg" podStartSLOduration=95.111320368 podStartE2EDuration="1m35.111320368s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:15.066726735 +0000 UTC m=+94.657938257" watchObservedRunningTime="2025-11-26 11:12:33.111320368 +0000 UTC m=+112.702531890" Nov 26 11:12:33 crc kubenswrapper[4622]: I1126 11:12:33.704974 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:33 crc kubenswrapper[4622]: E1126 11:12:33.705090 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:34 crc kubenswrapper[4622]: I1126 11:12:34.101137 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vmw42_fc4efcee-b872-406d-a694-3572222a8dfc/kube-multus/1.log" Nov 26 11:12:34 crc kubenswrapper[4622]: I1126 11:12:34.705645 4622 util.go:30] "No sandbox for pod can be found. 
Nov 26 11:12:34 crc kubenswrapper[4622]: I1126 11:12:34.705696 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 26 11:12:34 crc kubenswrapper[4622]: E1126 11:12:34.705788 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 26 11:12:34 crc kubenswrapper[4622]: I1126 11:12:34.705805 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 26 11:12:34 crc kubenswrapper[4622]: I1126 11:12:34.706431 4622 scope.go:117] "RemoveContainer" containerID="48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818"
Nov 26 11:12:34 crc kubenswrapper[4622]: E1126 11:12:34.705864 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 26 11:12:34 crc kubenswrapper[4622]: E1126 11:12:34.706577 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:35 crc kubenswrapper[4622]: I1126 11:12:35.104605 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovnkube-controller/3.log" Nov 26 11:12:35 crc kubenswrapper[4622]: I1126 11:12:35.106463 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerStarted","Data":"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747"} Nov 26 11:12:35 crc kubenswrapper[4622]: I1126 11:12:35.106803 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:12:35 crc kubenswrapper[4622]: I1126 11:12:35.126844 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podStartSLOduration=96.126825621 podStartE2EDuration="1m36.126825621s" podCreationTimestamp="2025-11-26 11:10:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:35.126688642 +0000 UTC m=+114.717900164" watchObservedRunningTime="2025-11-26 11:12:35.126825621 +0000 UTC m=+114.718037143" Nov 26 11:12:35 crc kubenswrapper[4622]: I1126 11:12:35.327869 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-z78ph"] Nov 26 11:12:35 crc kubenswrapper[4622]: I1126 11:12:35.327985 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:35 crc kubenswrapper[4622]: E1126 11:12:35.328053 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:36 crc kubenswrapper[4622]: I1126 11:12:36.705314 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:36 crc kubenswrapper[4622]: E1126 11:12:36.705633 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:36 crc kubenswrapper[4622]: I1126 11:12:36.705343 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:36 crc kubenswrapper[4622]: I1126 11:12:36.705314 4622 util.go:30] "No sandbox for pod can be found. 
Nov 26 11:12:36 crc kubenswrapper[4622]: E1126 11:12:36.705692 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81"
Nov 26 11:12:36 crc kubenswrapper[4622]: I1126 11:12:36.705384 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 26 11:12:36 crc kubenswrapper[4622]: E1126 11:12:36.705853 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 26 11:12:36 crc kubenswrapper[4622]: E1126 11:12:36.705945 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 26 11:12:38 crc kubenswrapper[4622]: I1126 11:12:38.706108 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 26 11:12:38 crc kubenswrapper[4622]: I1126 11:12:38.706111 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 26 11:12:38 crc kubenswrapper[4622]: I1126 11:12:38.706666 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph"
Nov 26 11:12:38 crc kubenswrapper[4622]: I1126 11:12:38.706891 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 26 11:12:38 crc kubenswrapper[4622]: E1126 11:12:38.706903 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 26 11:12:38 crc kubenswrapper[4622]: E1126 11:12:38.707086 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:38 crc kubenswrapper[4622]: E1126 11:12:38.707177 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:38 crc kubenswrapper[4622]: E1126 11:12:38.707229 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:40 crc kubenswrapper[4622]: I1126 11:12:40.705369 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:40 crc kubenswrapper[4622]: I1126 11:12:40.705370 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:40 crc kubenswrapper[4622]: I1126 11:12:40.705980 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:40 crc kubenswrapper[4622]: E1126 11:12:40.706251 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:40 crc kubenswrapper[4622]: I1126 11:12:40.706262 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:40 crc kubenswrapper[4622]: E1126 11:12:40.706324 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:40 crc kubenswrapper[4622]: E1126 11:12:40.706431 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:40 crc kubenswrapper[4622]: E1126 11:12:40.706557 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:40 crc kubenswrapper[4622]: E1126 11:12:40.759044 4622 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Nov 26 11:12:40 crc kubenswrapper[4622]: E1126 11:12:40.778057 4622 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 26 11:12:42 crc kubenswrapper[4622]: I1126 11:12:42.705585 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:42 crc kubenswrapper[4622]: I1126 11:12:42.705585 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:42 crc kubenswrapper[4622]: E1126 11:12:42.706278 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:42 crc kubenswrapper[4622]: I1126 11:12:42.705620 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:42 crc kubenswrapper[4622]: E1126 11:12:42.706339 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:42 crc kubenswrapper[4622]: I1126 11:12:42.705603 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:42 crc kubenswrapper[4622]: E1126 11:12:42.706399 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:42 crc kubenswrapper[4622]: E1126 11:12:42.706519 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:43 crc kubenswrapper[4622]: I1126 11:12:43.763924 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:12:44 crc kubenswrapper[4622]: I1126 11:12:44.705895 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:44 crc kubenswrapper[4622]: I1126 11:12:44.705933 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:44 crc kubenswrapper[4622]: I1126 11:12:44.705977 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:44 crc kubenswrapper[4622]: E1126 11:12:44.706075 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:44 crc kubenswrapper[4622]: I1126 11:12:44.706089 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:44 crc kubenswrapper[4622]: E1126 11:12:44.706180 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:44 crc kubenswrapper[4622]: E1126 11:12:44.706239 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:44 crc kubenswrapper[4622]: E1126 11:12:44.706274 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:45 crc kubenswrapper[4622]: I1126 11:12:45.706274 4622 scope.go:117] "RemoveContainer" containerID="56dc4e957dfc7b5545751f3cb685d899d2cb1aec53d04cdca0c651a03815f3a0" Nov 26 11:12:45 crc kubenswrapper[4622]: E1126 11:12:45.779061 4622 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 26 11:12:46 crc kubenswrapper[4622]: I1126 11:12:46.132744 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vmw42_fc4efcee-b872-406d-a694-3572222a8dfc/kube-multus/1.log" Nov 26 11:12:46 crc kubenswrapper[4622]: I1126 11:12:46.132941 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vmw42" event={"ID":"fc4efcee-b872-406d-a694-3572222a8dfc","Type":"ContainerStarted","Data":"00579d8535f5e85e068296e9b3cdd883e2e3800c771b963c3ee492fac0bcdd43"} Nov 26 11:12:46 crc kubenswrapper[4622]: I1126 11:12:46.705619 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:46 crc kubenswrapper[4622]: I1126 11:12:46.705644 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:46 crc kubenswrapper[4622]: I1126 11:12:46.705649 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:46 crc kubenswrapper[4622]: I1126 11:12:46.705624 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:46 crc kubenswrapper[4622]: E1126 11:12:46.705725 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:46 crc kubenswrapper[4622]: E1126 11:12:46.705842 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:46 crc kubenswrapper[4622]: E1126 11:12:46.705858 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:46 crc kubenswrapper[4622]: E1126 11:12:46.705891 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:48 crc kubenswrapper[4622]: I1126 11:12:48.705860 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:12:48 crc kubenswrapper[4622]: E1126 11:12:48.705990 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 26 11:12:48 crc kubenswrapper[4622]: I1126 11:12:48.706194 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:12:48 crc kubenswrapper[4622]: E1126 11:12:48.706237 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 26 11:12:48 crc kubenswrapper[4622]: I1126 11:12:48.706368 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:12:48 crc kubenswrapper[4622]: E1126 11:12:48.706410 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 26 11:12:48 crc kubenswrapper[4622]: I1126 11:12:48.706541 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:12:48 crc kubenswrapper[4622]: E1126 11:12:48.706592 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81" Nov 26 11:12:50 crc kubenswrapper[4622]: I1126 11:12:50.705524 4622 util.go:30] "No sandbox for pod can be found. 
Nov 26 11:12:50 crc kubenswrapper[4622]: I1126 11:12:50.705564 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 26 11:12:50 crc kubenswrapper[4622]: E1126 11:12:50.706298 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 26 11:12:50 crc kubenswrapper[4622]: I1126 11:12:50.706314 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph"
Nov 26 11:12:50 crc kubenswrapper[4622]: I1126 11:12:50.706448 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 26 11:12:50 crc kubenswrapper[4622]: E1126 11:12:50.706613 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-z78ph" podUID="f23a93f9-15cf-4dfd-802d-4b6bd04bbf81"
Nov 26 11:12:50 crc kubenswrapper[4622]: E1126 11:12:50.706732 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 26 11:12:50 crc kubenswrapper[4622]: E1126 11:12:50.706746 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 26 11:12:52 crc kubenswrapper[4622]: I1126 11:12:52.705487 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 26 11:12:52 crc kubenswrapper[4622]: I1126 11:12:52.705530 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 26 11:12:52 crc kubenswrapper[4622]: I1126 11:12:52.705658 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph"
Nov 26 11:12:52 crc kubenswrapper[4622]: I1126 11:12:52.705713 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 26 11:12:52 crc kubenswrapper[4622]: I1126 11:12:52.707425 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Nov 26 11:12:52 crc kubenswrapper[4622]: I1126 11:12:52.707548 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Nov 26 11:12:52 crc kubenswrapper[4622]: I1126 11:12:52.707913 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Nov 26 11:12:52 crc kubenswrapper[4622]: I1126 11:12:52.707959 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Nov 26 11:12:52 crc kubenswrapper[4622]: I1126 11:12:52.708133 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Nov 26 11:12:52 crc kubenswrapper[4622]: I1126 11:12:52.708445 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.634526 4622 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.659598 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.659931 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.660168 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vprnt"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.660626 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.660734 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-r2tzg"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.660967 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-r2tzg"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.663854 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.663992 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.664544 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.665520 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8g8bw"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.666004 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-8g8bw"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.666263 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-b44gm"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.666615 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-b44gm"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.666911 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-zgbwd"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.667188 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-zgbwd"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.667320 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-mq7t5"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.667581 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.668224 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.672962 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.672990 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.673126 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.673327 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.673127 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.674260 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.680941 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.681239 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.681285 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qqt6v"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.681444 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.681533 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.681771 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.681833 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.681774 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.682182 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.682212 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.682226 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.682315 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.682476 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.685897 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.686016 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.686035 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.686116 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.686120 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.686153 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.686384 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.686594 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.686679 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.686752 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.687213 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.687422 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.687537 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688010 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688130 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688208 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688384 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688475 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688583 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688601 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688659 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688729 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688809 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688881 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689068 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-trgkr"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689379 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688882 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688904 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688929 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688955 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.688981 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689004 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689050 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689075 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689106 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.692437 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689129 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689219 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.692663 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689245 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689265 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689286 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689307 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689415 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689446 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689542 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.689656 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.691284 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.691312 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.693530 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-cwk8q"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.693840 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-cwk8q"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.694133 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.695636 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.697436 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.698270 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.698397 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.699272 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9d7nf"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.699629 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.699765 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.699777 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.699865 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.700178 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.700256 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.700295 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.700378 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.700434 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.700515 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.700386 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.700553 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.700598 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.700613 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9d7nf"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.700627 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.700668 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.700930 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.702311 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.702838 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.712391 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.731778 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.732200 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.732378 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.732457 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.732483 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.734067 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.734409 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.734956 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.735434 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.735806 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.735857 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.736250 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.736294 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.736321 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.736747 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vgnvk"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.737000 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7qcfs"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.737286 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7qcfs"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.737460 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.737598 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.737002 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.737246 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.737909 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-p5szg"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.738281 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-p5szg"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.738948 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.739273 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv"
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.740771 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-hpqsn\" (UID: \"2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.740817 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlh5m\" (UniqueName: \"kubernetes.io/projected/94200fbf-6965-4439-b5f7-8b00787317ea-kube-api-access-nlh5m\") pod \"dns-operator-744455d44c-zgbwd\" (UID: \"94200fbf-6965-4439-b5f7-8b00787317ea\") " pod="openshift-dns-operator/dns-operator-744455d44c-zgbwd" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.740845 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f8bc3e2f-5cfa-49da-863d-e8c611605186-audit-dir\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.740878 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6zfv\" (UniqueName: \"kubernetes.io/projected/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-kube-api-access-c6zfv\") pod \"marketplace-operator-79b997595-vgnvk\" (UID: \"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992\") " pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.740898 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f8bc3e2f-5cfa-49da-863d-e8c611605186-encryption-config\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.740913 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8bc3e2f-5cfa-49da-863d-e8c611605186-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.740929 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/75e482b2-62c5-4e59-8523-e9e5c887d9db-srv-cert\") pod \"catalog-operator-68c6474976-zglsq\" (UID: \"75e482b2-62c5-4e59-8523-e9e5c887d9db\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.740959 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/75e482b2-62c5-4e59-8523-e9e5c887d9db-profile-collector-cert\") pod \"catalog-operator-68c6474976-zglsq\" (UID: \"75e482b2-62c5-4e59-8523-e9e5c887d9db\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.740978 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/e9529ef9-5f68-4c96-8266-d5e4fb16f749-etcd-service-ca\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.740996 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00a6832e-e8ed-48ff-9b63-fe02a10d1561-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-6gns5\" (UID: \"00a6832e-e8ed-48ff-9b63-fe02a10d1561\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741011 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4rwl\" (UniqueName: \"kubernetes.io/projected/2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1-kube-api-access-m4rwl\") pod \"openshift-apiserver-operator-796bbdcf4f-hpqsn\" (UID: \"2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741026 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f8bc3e2f-5cfa-49da-863d-e8c611605186-etcd-client\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741039 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f8bc3e2f-5cfa-49da-863d-e8c611605186-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741057 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9hxg\" (UniqueName: \"kubernetes.io/projected/e9529ef9-5f68-4c96-8266-d5e4fb16f749-kube-api-access-w9hxg\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741075 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57hsg\" (UniqueName: \"kubernetes.io/projected/7dd22bb8-3b26-4a60-8321-5462640f7816-kube-api-access-57hsg\") pod \"machine-approver-56656f9798-6zjxz\" (UID: \"7dd22bb8-3b26-4a60-8321-5462640f7816\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741092 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-hpqsn\" (UID: 
\"2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741113 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d050f464-dde0-45b6-a227-25bea4eafe0b-proxy-tls\") pod \"machine-config-controller-84d6567774-6fsv7\" (UID: \"d050f464-dde0-45b6-a227-25bea4eafe0b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741128 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/e9529ef9-5f68-4c96-8266-d5e4fb16f749-etcd-ca\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741143 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7dd22bb8-3b26-4a60-8321-5462640f7816-auth-proxy-config\") pod \"machine-approver-56656f9798-6zjxz\" (UID: \"7dd22bb8-3b26-4a60-8321-5462640f7816\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741157 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7dd22bb8-3b26-4a60-8321-5462640f7816-machine-approver-tls\") pod \"machine-approver-56656f9798-6zjxz\" (UID: \"7dd22bb8-3b26-4a60-8321-5462640f7816\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741172 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00a6832e-e8ed-48ff-9b63-fe02a10d1561-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-6gns5\" (UID: \"00a6832e-e8ed-48ff-9b63-fe02a10d1561\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741187 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcpt4\" (UniqueName: \"kubernetes.io/projected/d050f464-dde0-45b6-a227-25bea4eafe0b-kube-api-access-mcpt4\") pod \"machine-config-controller-84d6567774-6fsv7\" (UID: \"d050f464-dde0-45b6-a227-25bea4eafe0b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741205 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8prx\" (UniqueName: \"kubernetes.io/projected/00a6832e-e8ed-48ff-9b63-fe02a10d1561-kube-api-access-s8prx\") pod \"kube-storage-version-migrator-operator-b67b599dd-6gns5\" (UID: \"00a6832e-e8ed-48ff-9b63-fe02a10d1561\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741232 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e9529ef9-5f68-4c96-8266-d5e4fb16f749-config\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741248 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8bc3e2f-5cfa-49da-863d-e8c611605186-serving-cert\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741263 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vgnvk\" (UID: \"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992\") " pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741278 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e9529ef9-5f68-4c96-8266-d5e4fb16f749-etcd-client\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741304 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f8bc3e2f-5cfa-49da-863d-e8c611605186-audit-policies\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741335 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dd22bb8-3b26-4a60-8321-5462640f7816-config\") pod \"machine-approver-56656f9798-6zjxz\" (UID: \"7dd22bb8-3b26-4a60-8321-5462640f7816\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741364 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vgnvk\" (UID: \"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992\") " pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741406 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/94200fbf-6965-4439-b5f7-8b00787317ea-metrics-tls\") pod \"dns-operator-744455d44c-zgbwd\" (UID: \"94200fbf-6965-4439-b5f7-8b00787317ea\") " pod="openshift-dns-operator/dns-operator-744455d44c-zgbwd" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741515 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9529ef9-5f68-4c96-8266-d5e4fb16f749-serving-cert\") pod \"etcd-operator-b45778765-mq7t5\" (UID: 
\"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741559 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb277c5b-5dac-473c-a509-51016a9b13f4-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kgngw\" (UID: \"bb277c5b-5dac-473c-a509-51016a9b13f4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741585 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jn79r\" (UniqueName: \"kubernetes.io/projected/75e482b2-62c5-4e59-8523-e9e5c887d9db-kube-api-access-jn79r\") pod \"catalog-operator-68c6474976-zglsq\" (UID: \"75e482b2-62c5-4e59-8523-e9e5c887d9db\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741600 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8svn7\" (UniqueName: \"kubernetes.io/projected/f8bc3e2f-5cfa-49da-863d-e8c611605186-kube-api-access-8svn7\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741625 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb277c5b-5dac-473c-a509-51016a9b13f4-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kgngw\" (UID: \"bb277c5b-5dac-473c-a509-51016a9b13f4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741651 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bb277c5b-5dac-473c-a509-51016a9b13f4-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kgngw\" (UID: \"bb277c5b-5dac-473c-a509-51016a9b13f4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.741671 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d050f464-dde0-45b6-a227-25bea4eafe0b-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-6fsv7\" (UID: \"d050f464-dde0-45b6-a227-25bea4eafe0b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.742845 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.742936 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-w7zs4"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.743244 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.743258 4622 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.743845 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.743880 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.743954 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-5lz6m"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.744127 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.744224 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.744248 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.744318 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.744403 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.744429 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-5lz6m" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.744483 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.744536 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.744627 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.744651 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.744731 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-tq627"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.744928 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.744987 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.745160 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.745476 4622 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tq627" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.745660 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.745797 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.747541 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-p78g4"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.747925 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.747957 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.748610 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.749079 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.749223 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.749567 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.749975 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.750470 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.750681 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-98snv"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.751030 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-98snv" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.751639 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4x7nf"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.751981 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-4x7nf" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.752157 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g675c"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.752684 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.757163 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-r2tzg"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.761618 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.764560 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.768487 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vprnt"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.772078 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.773409 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-mq7t5"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.774332 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.775727 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qqt6v"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.776059 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.776926 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-n8mbn"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.777719 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-n8mbn" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.778150 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-b44gm"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.778906 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.780006 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.780907 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-zgbwd"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.781655 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.781829 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9d7nf"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.782766 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.784703 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.784741 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8g8bw"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.785370 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-p5szg"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.786303 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.788496 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-98snv"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.790119 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.791959 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vgnvk"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.792425 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.793717 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-cwk8q"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.794609 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-5lz6m"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.795515 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-trgkr"] Nov 26 11:12:54 crc 
kubenswrapper[4622]: I1126 11:12:54.796402 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.797271 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.798206 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.799418 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7qcfs"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.799931 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.800964 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.801198 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.801925 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.802848 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-p78g4"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.803660 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4x7nf"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.804540 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-tq627"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.805534 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g675c"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.807817 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-swz8p"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.808326 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-kkvq6"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.809064 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-swz8p"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.809202 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-swz8p" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.809212 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.810626 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-kkvq6"] Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.821312 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.841646 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842008 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bb277c5b-5dac-473c-a509-51016a9b13f4-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kgngw\" (UID: \"bb277c5b-5dac-473c-a509-51016a9b13f4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842039 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d050f464-dde0-45b6-a227-25bea4eafe0b-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-6fsv7\" (UID: \"d050f464-dde0-45b6-a227-25bea4eafe0b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842063 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-hpqsn\" (UID: \"2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842084 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlh5m\" (UniqueName: \"kubernetes.io/projected/94200fbf-6965-4439-b5f7-8b00787317ea-kube-api-access-nlh5m\") pod \"dns-operator-744455d44c-zgbwd\" (UID: \"94200fbf-6965-4439-b5f7-8b00787317ea\") " pod="openshift-dns-operator/dns-operator-744455d44c-zgbwd" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842101 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f8bc3e2f-5cfa-49da-863d-e8c611605186-audit-dir\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842133 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6zfv\" (UniqueName: \"kubernetes.io/projected/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-kube-api-access-c6zfv\") pod \"marketplace-operator-79b997595-vgnvk\" (UID: \"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992\") " pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842148 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f8bc3e2f-5cfa-49da-863d-e8c611605186-encryption-config\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: 
\"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842164 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8bc3e2f-5cfa-49da-863d-e8c611605186-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842164 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f8bc3e2f-5cfa-49da-863d-e8c611605186-audit-dir\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842183 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/75e482b2-62c5-4e59-8523-e9e5c887d9db-srv-cert\") pod \"catalog-operator-68c6474976-zglsq\" (UID: \"75e482b2-62c5-4e59-8523-e9e5c887d9db\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842203 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/75e482b2-62c5-4e59-8523-e9e5c887d9db-profile-collector-cert\") pod \"catalog-operator-68c6474976-zglsq\" (UID: \"75e482b2-62c5-4e59-8523-e9e5c887d9db\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842218 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/e9529ef9-5f68-4c96-8266-d5e4fb16f749-etcd-service-ca\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842233 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00a6832e-e8ed-48ff-9b63-fe02a10d1561-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-6gns5\" (UID: \"00a6832e-e8ed-48ff-9b63-fe02a10d1561\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842250 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4rwl\" (UniqueName: \"kubernetes.io/projected/2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1-kube-api-access-m4rwl\") pod \"openshift-apiserver-operator-796bbdcf4f-hpqsn\" (UID: \"2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842264 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f8bc3e2f-5cfa-49da-863d-e8c611605186-etcd-client\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842279 4622 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f8bc3e2f-5cfa-49da-863d-e8c611605186-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842293 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9hxg\" (UniqueName: \"kubernetes.io/projected/e9529ef9-5f68-4c96-8266-d5e4fb16f749-kube-api-access-w9hxg\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842310 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57hsg\" (UniqueName: \"kubernetes.io/projected/7dd22bb8-3b26-4a60-8321-5462640f7816-kube-api-access-57hsg\") pod \"machine-approver-56656f9798-6zjxz\" (UID: \"7dd22bb8-3b26-4a60-8321-5462640f7816\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842328 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-hpqsn\" (UID: \"2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842357 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d050f464-dde0-45b6-a227-25bea4eafe0b-proxy-tls\") pod \"machine-config-controller-84d6567774-6fsv7\" (UID: \"d050f464-dde0-45b6-a227-25bea4eafe0b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842376 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/e9529ef9-5f68-4c96-8266-d5e4fb16f749-etcd-ca\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842393 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00a6832e-e8ed-48ff-9b63-fe02a10d1561-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-6gns5\" (UID: \"00a6832e-e8ed-48ff-9b63-fe02a10d1561\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842409 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7dd22bb8-3b26-4a60-8321-5462640f7816-auth-proxy-config\") pod \"machine-approver-56656f9798-6zjxz\" (UID: \"7dd22bb8-3b26-4a60-8321-5462640f7816\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842423 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: 
\"kubernetes.io/secret/7dd22bb8-3b26-4a60-8321-5462640f7816-machine-approver-tls\") pod \"machine-approver-56656f9798-6zjxz\" (UID: \"7dd22bb8-3b26-4a60-8321-5462640f7816\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842440 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcpt4\" (UniqueName: \"kubernetes.io/projected/d050f464-dde0-45b6-a227-25bea4eafe0b-kube-api-access-mcpt4\") pod \"machine-config-controller-84d6567774-6fsv7\" (UID: \"d050f464-dde0-45b6-a227-25bea4eafe0b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842459 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8prx\" (UniqueName: \"kubernetes.io/projected/00a6832e-e8ed-48ff-9b63-fe02a10d1561-kube-api-access-s8prx\") pod \"kube-storage-version-migrator-operator-b67b599dd-6gns5\" (UID: \"00a6832e-e8ed-48ff-9b63-fe02a10d1561\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842479 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vgnvk\" (UID: \"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992\") " pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842497 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9529ef9-5f68-4c96-8266-d5e4fb16f749-config\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842539 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8bc3e2f-5cfa-49da-863d-e8c611605186-serving-cert\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842555 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e9529ef9-5f68-4c96-8266-d5e4fb16f749-etcd-client\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842578 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f8bc3e2f-5cfa-49da-863d-e8c611605186-audit-policies\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842608 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dd22bb8-3b26-4a60-8321-5462640f7816-config\") pod \"machine-approver-56656f9798-6zjxz\" (UID: \"7dd22bb8-3b26-4a60-8321-5462640f7816\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842624 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vgnvk\" (UID: \"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992\") " pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842638 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/94200fbf-6965-4439-b5f7-8b00787317ea-metrics-tls\") pod \"dns-operator-744455d44c-zgbwd\" (UID: \"94200fbf-6965-4439-b5f7-8b00787317ea\") " pod="openshift-dns-operator/dns-operator-744455d44c-zgbwd" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842654 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9529ef9-5f68-4c96-8266-d5e4fb16f749-serving-cert\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842670 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb277c5b-5dac-473c-a509-51016a9b13f4-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kgngw\" (UID: \"bb277c5b-5dac-473c-a509-51016a9b13f4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842687 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jn79r\" (UniqueName: \"kubernetes.io/projected/75e482b2-62c5-4e59-8523-e9e5c887d9db-kube-api-access-jn79r\") pod \"catalog-operator-68c6474976-zglsq\" (UID: \"75e482b2-62c5-4e59-8523-e9e5c887d9db\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842704 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8svn7\" (UniqueName: \"kubernetes.io/projected/f8bc3e2f-5cfa-49da-863d-e8c611605186-kube-api-access-8svn7\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.842720 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb277c5b-5dac-473c-a509-51016a9b13f4-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kgngw\" (UID: \"bb277c5b-5dac-473c-a509-51016a9b13f4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.843158 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d050f464-dde0-45b6-a227-25bea4eafe0b-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-6fsv7\" (UID: \"d050f464-dde0-45b6-a227-25bea4eafe0b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7" Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 
11:12:54.842701    4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8bc3e2f-5cfa-49da-863d-e8c611605186-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.843608 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f8bc3e2f-5cfa-49da-863d-e8c611605186-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.844009 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/e9529ef9-5f68-4c96-8266-d5e4fb16f749-etcd-ca\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.844094 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/e9529ef9-5f68-4c96-8266-d5e4fb16f749-etcd-service-ca\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.844297 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00a6832e-e8ed-48ff-9b63-fe02a10d1561-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-6gns5\" (UID: \"00a6832e-e8ed-48ff-9b63-fe02a10d1561\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.844334 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dd22bb8-3b26-4a60-8321-5462640f7816-config\") pod \"machine-approver-56656f9798-6zjxz\" (UID: \"7dd22bb8-3b26-4a60-8321-5462640f7816\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.844562 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9529ef9-5f68-4c96-8266-d5e4fb16f749-config\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.844816 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f8bc3e2f-5cfa-49da-863d-e8c611605186-audit-policies\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.845207 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb277c5b-5dac-473c-a509-51016a9b13f4-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kgngw\" (UID: \"bb277c5b-5dac-473c-a509-51016a9b13f4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.847815 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9529ef9-5f68-4c96-8266-d5e4fb16f749-serving-cert\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.847817 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00a6832e-e8ed-48ff-9b63-fe02a10d1561-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-6gns5\" (UID: \"00a6832e-e8ed-48ff-9b63-fe02a10d1561\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.847919 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/94200fbf-6965-4439-b5f7-8b00787317ea-metrics-tls\") pod \"dns-operator-744455d44c-zgbwd\" (UID: \"94200fbf-6965-4439-b5f7-8b00787317ea\") " pod="openshift-dns-operator/dns-operator-744455d44c-zgbwd"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.848119 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb277c5b-5dac-473c-a509-51016a9b13f4-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kgngw\" (UID: \"bb277c5b-5dac-473c-a509-51016a9b13f4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.848121 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7dd22bb8-3b26-4a60-8321-5462640f7816-machine-approver-tls\") pod \"machine-approver-56656f9798-6zjxz\" (UID: \"7dd22bb8-3b26-4a60-8321-5462640f7816\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.848212 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f8bc3e2f-5cfa-49da-863d-e8c611605186-etcd-client\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.848227 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f8bc3e2f-5cfa-49da-863d-e8c611605186-encryption-config\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.848320 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8bc3e2f-5cfa-49da-863d-e8c611605186-serving-cert\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.848722 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e9529ef9-5f68-4c96-8266-d5e4fb16f749-etcd-client\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.862103 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.869286 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-9vzz6"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.872153 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-9vzz6"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.876900 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-9vzz6"]
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.881611 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.901322 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.904740 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7dd22bb8-3b26-4a60-8321-5462640f7816-auth-proxy-config\") pod \"machine-approver-56656f9798-6zjxz\" (UID: \"7dd22bb8-3b26-4a60-8321-5462640f7816\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.921483 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.941521 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.961969 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.981442 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Nov 26 11:12:54 crc kubenswrapper[4622]: I1126 11:12:54.986459 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-hpqsn\" (UID: \"2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn"
Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.001676 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.021446 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
\"kubernetes.io/configmap/2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-hpqsn\" (UID: \"2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.042083 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.061708 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.082242 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.101267 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.121390 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.161734 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.182064 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.186775 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/75e482b2-62c5-4e59-8523-e9e5c887d9db-profile-collector-cert\") pod \"catalog-operator-68c6474976-zglsq\" (UID: \"75e482b2-62c5-4e59-8523-e9e5c887d9db\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.206125 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.221442 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.241311 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.261995 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.266874 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d050f464-dde0-45b6-a227-25bea4eafe0b-proxy-tls\") pod \"machine-config-controller-84d6567774-6fsv7\" (UID: \"d050f464-dde0-45b6-a227-25bea4eafe0b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.281259 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.301809 4622 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.322382 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.341911 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.362024 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.366365 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/75e482b2-62c5-4e59-8523-e9e5c887d9db-srv-cert\") pod \"catalog-operator-68c6474976-zglsq\" (UID: \"75e482b2-62c5-4e59-8523-e9e5c887d9db\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.382061 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.401538 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.421935 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.442213 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.462184 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.481837 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.501723 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.507907 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vgnvk\" (UID: \"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992\") " pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.522000 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.549292 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.556002 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vgnvk\" (UID: \"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992\") " 
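Each "Caches populated for *v1.ConfigMap/*v1.Secret from object-..." line marks a dedicated reflector finishing its initial list for one specific object a pod references (note the fieldSelector=metadata.name GET in the throttled request further down). A minimal client-go sketch of such a single-object watch; the namespace and secret name are borrowed from the entries above, but the wiring is an assumption for illustration, not kubelet source:

package main

import (
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/fields"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
	"k8s.io/client-go/tools/cache"
)

func main() {
	cfg, err := rest.InClusterConfig() // assumes running inside a cluster
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// List/watch exactly one Secret by name, mirroring the per-object
	// reflectors behind the "Caches populated" entries.
	lw := cache.NewListWatchFromClient(
		client.CoreV1().RESTClient(), "secrets", "openshift-marketplace",
		fields.OneTermEqualSelector("metadata.name", "marketplace-operator-metrics"),
	)
	_, controller := cache.NewInformer(lw, &corev1.Secret{}, 10*time.Minute,
		cache.ResourceEventHandlerFuncs{
			AddFunc: func(obj interface{}) {
				s := obj.(*corev1.Secret)
				fmt.Printf("cache populated for secret %s/%s\n", s.Namespace, s.Name)
			},
		})
	stop := make(chan struct{})
	defer close(stop)
	controller.Run(stop) // blocks; the cache is "populated" after the initial list
}

One reflector per referenced object keeps the kubelet from watching whole namespaces of secrets it does not need.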
pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.561545 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.582052 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.622303 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.641690 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.661281 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.682382 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.702230 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.722390 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.742084 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.760517 4622 request.go:700] Waited for 1.016880867s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0 Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.762084 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.781666 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.801196 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.821551 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.842036 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.861436 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.882001 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.901747 4622 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication-operator"/"serving-cert" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.922254 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.947993 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.961443 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Nov 26 11:12:55 crc kubenswrapper[4622]: I1126 11:12:55.981410 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.002086 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.021551 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.041267 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.061980 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.081427 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.101672 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.121749 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.141238 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.163748 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.181209 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.201580 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.221762 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.242158 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.261217 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 
11:12:56.281796 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.302027 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.325485 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.341109 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.361491 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.381594 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.401164 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.421352 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.441478 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.462093 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.481457 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.502156 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.521464 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.542025 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.561367 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.581428 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.602081 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.621411 4622 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.642065 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.674410 4622 
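The request.go:700 line above ("Waited for 1.016880867s due to client-side throttling, not priority and fairness") is emitted by client-go's token-bucket rate limiter once the client has exhausted its QPS/Burst budget; it is a purely client-side delay, distinct from the API server's Priority and Fairness queuing. A sketch of the knobs involved, with illustrative values rather than the kubelet's actual settings:

package main

import (
	"fmt"

	"k8s.io/client-go/rest"
	"k8s.io/client-go/util/flowcontrol"
)

func main() {
	cfg := &rest.Config{
		Host: "https://api-int.crc.testing:6443",
		// Token bucket: sustained QPS with a Burst allowance. When the
		// bucket is empty, requests block and client-go logs the
		// "Waited for ... due to client-side throttling" message seen above.
		QPS:   5,
		Burst: 10,
	}
	// Equivalent explicit limiter (what client-go builds from QPS/Burst):
	cfg.RateLimiter = flowcontrol.NewTokenBucketRateLimiter(cfg.QPS, cfg.Burst)
	fmt.Printf("client limited to %.0f qps (burst %d)\n", cfg.QPS, cfg.Burst)
}

A startup storm of per-object reflector LISTs, as in this log, drains the bucket quickly, which is why the waits show up exactly here.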
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bb277c5b-5dac-473c-a509-51016a9b13f4-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kgngw\" (UID: \"bb277c5b-5dac-473c-a509-51016a9b13f4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.693878 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlh5m\" (UniqueName: \"kubernetes.io/projected/94200fbf-6965-4439-b5f7-8b00787317ea-kube-api-access-nlh5m\") pod \"dns-operator-744455d44c-zgbwd\" (UID: \"94200fbf-6965-4439-b5f7-8b00787317ea\") " pod="openshift-dns-operator/dns-operator-744455d44c-zgbwd" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.713044 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6zfv\" (UniqueName: \"kubernetes.io/projected/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-kube-api-access-c6zfv\") pod \"marketplace-operator-79b997595-vgnvk\" (UID: \"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992\") " pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.732934 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9hxg\" (UniqueName: \"kubernetes.io/projected/e9529ef9-5f68-4c96-8266-d5e4fb16f749-kube-api-access-w9hxg\") pod \"etcd-operator-b45778765-mq7t5\" (UID: \"e9529ef9-5f68-4c96-8266-d5e4fb16f749\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.753174 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57hsg\" (UniqueName: \"kubernetes.io/projected/7dd22bb8-3b26-4a60-8321-5462640f7816-kube-api-access-57hsg\") pod \"machine-approver-56656f9798-6zjxz\" (UID: \"7dd22bb8-3b26-4a60-8321-5462640f7816\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.772644 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8svn7\" (UniqueName: \"kubernetes.io/projected/f8bc3e2f-5cfa-49da-863d-e8c611605186-kube-api-access-8svn7\") pod \"apiserver-7bbb656c7d-dscwx\" (UID: \"f8bc3e2f-5cfa-49da-863d-e8c611605186\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.780473 4622 request.go:700] Waited for 1.936998853s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/serviceaccounts/olm-operator-serviceaccount/token Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.793243 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jn79r\" (UniqueName: \"kubernetes.io/projected/75e482b2-62c5-4e59-8523-e9e5c887d9db-kube-api-access-jn79r\") pod \"catalog-operator-68c6474976-zglsq\" (UID: \"75e482b2-62c5-4e59-8523-e9e5c887d9db\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.813971 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8prx\" (UniqueName: \"kubernetes.io/projected/00a6832e-e8ed-48ff-9b63-fe02a10d1561-kube-api-access-s8prx\") pod \"kube-storage-version-migrator-operator-b67b599dd-6gns5\" (UID: 
\"00a6832e-e8ed-48ff-9b63-fe02a10d1561\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.834085 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcpt4\" (UniqueName: \"kubernetes.io/projected/d050f464-dde0-45b6-a227-25bea4eafe0b-kube-api-access-mcpt4\") pod \"machine-config-controller-84d6567774-6fsv7\" (UID: \"d050f464-dde0-45b6-a227-25bea4eafe0b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.847118 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-zgbwd" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.852750 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4rwl\" (UniqueName: \"kubernetes.io/projected/2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1-kube-api-access-m4rwl\") pod \"openshift-apiserver-operator-796bbdcf4f-hpqsn\" (UID: \"2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.857612 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.862711 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.868271 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.874382 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.883380 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.903749 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.906655 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.933067 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.945961 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965268 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71fcfb8e-5a8a-44fe-9933-cee4d8c0012c-trusted-ca\") pod \"console-operator-58897d9998-cwk8q\" (UID: \"71fcfb8e-5a8a-44fe-9933-cee4d8c0012c\") " pod="openshift-console-operator/console-operator-58897d9998-cwk8q" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965294 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-oauth-serving-cert\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965316 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-registry-tls\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965333 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fda35c7d-d0bc-4730-b21d-a66ff3b0f62e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-645bl\" (UID: \"fda35c7d-d0bc-4730-b21d-a66ff3b0f62e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965361 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-config\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965376 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e202504f-0b8b-4709-b211-031ebf81cc77-serving-cert\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965396 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ac45a607-6d21-475e-8f82-ed9bbcbb1f65-images\") pod \"machine-api-operator-5694c8668f-vprnt\" (UID: \"ac45a607-6d21-475e-8f82-ed9bbcbb1f65\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965440 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f6aca4cf-35d0-47a4-9476-9a6888f31efb-available-featuregates\") pod \"openshift-config-operator-7777fb866f-trgkr\" (UID: \"f6aca4cf-35d0-47a4-9476-9a6888f31efb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr" Nov 26 11:12:56 crc 
kubenswrapper[4622]: I1126 11:12:56.965457 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5012bed8-6f9e-47b8-9f71-5eff34c9d997-trusted-ca\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965471 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f84bd52b-3da4-47a1-a4fc-1c72fa27d846-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-dg7cl\" (UID: \"f84bd52b-3da4-47a1-a4fc-1c72fa27d846\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965484 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e202504f-0b8b-4709-b211-031ebf81cc77-image-import-ca\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965512 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgmq7\" (UniqueName: \"kubernetes.io/projected/c58a0d86-56ac-4aaa-b2c9-995c925cd839-kube-api-access-tgmq7\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965530 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ad723c1b-c1ad-40e9-9527-a177e0f01117-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bwskx\" (UID: \"ad723c1b-c1ad-40e9-9527-a177e0f01117\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965745 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71fcfb8e-5a8a-44fe-9933-cee4d8c0012c-serving-cert\") pod \"console-operator-58897d9998-cwk8q\" (UID: \"71fcfb8e-5a8a-44fe-9933-cee4d8c0012c\") " pod="openshift-console-operator/console-operator-58897d9998-cwk8q" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965819 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/af17650f-233a-479a-9152-082ca02f5cbb-audit-dir\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965853 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 
11:12:56.965873 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4rsv\" (UniqueName: \"kubernetes.io/projected/3f6822df-f9db-480d-bf69-58c39f017ccc-kube-api-access-w4rsv\") pod \"cluster-samples-operator-665b6dd947-9d7nf\" (UID: \"3f6822df-f9db-480d-bf69-58c39f017ccc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9d7nf" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965889 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/500dd093-4153-433e-b01b-fd8dfa5622f6-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8vgv8\" (UID: \"500dd093-4153-433e-b01b-fd8dfa5622f6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965929 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e202504f-0b8b-4709-b211-031ebf81cc77-etcd-serving-ca\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.965959 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966006 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966023 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966041 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a55ea739-d4ed-4594-9a3d-aca6728c0618-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-p5szg\" (UID: \"a55ea739-d4ed-4594-9a3d-aca6728c0618\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p5szg" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966079 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcksl\" (UniqueName: \"kubernetes.io/projected/500dd093-4153-433e-b01b-fd8dfa5622f6-kube-api-access-kcksl\") pod \"cluster-image-registry-operator-dc59b4c8b-8vgv8\" (UID: \"500dd093-4153-433e-b01b-fd8dfa5622f6\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966097 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966214 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5012bed8-6f9e-47b8-9f71-5eff34c9d997-registry-certificates\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966255 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fda35c7d-d0bc-4730-b21d-a66ff3b0f62e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-645bl\" (UID: \"fda35c7d-d0bc-4730-b21d-a66ff3b0f62e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966273 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d6415e96-60f9-4d4a-8476-c39c404ba62d-metrics-tls\") pod \"ingress-operator-5b745b69d9-69qr9\" (UID: \"d6415e96-60f9-4d4a-8476-c39c404ba62d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966289 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966332 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fda35c7d-d0bc-4730-b21d-a66ff3b0f62e-config\") pod \"kube-apiserver-operator-766d6c64bb-645bl\" (UID: \"fda35c7d-d0bc-4730-b21d-a66ff3b0f62e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966361 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-bound-sa-token\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966377 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ad723c1b-c1ad-40e9-9527-a177e0f01117-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bwskx\" (UID: 
\"ad723c1b-c1ad-40e9-9527-a177e0f01117\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966405 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ac45a607-6d21-475e-8f82-ed9bbcbb1f65-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vprnt\" (UID: \"ac45a607-6d21-475e-8f82-ed9bbcbb1f65\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966454 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d6415e96-60f9-4d4a-8476-c39c404ba62d-trusted-ca\") pod \"ingress-operator-5b745b69d9-69qr9\" (UID: \"d6415e96-60f9-4d4a-8476-c39c404ba62d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966477 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966533 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3f6822df-f9db-480d-bf69-58c39f017ccc-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-9d7nf\" (UID: \"3f6822df-f9db-480d-bf69-58c39f017ccc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9d7nf" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966550 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966564 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966579 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjtl7\" (UniqueName: \"kubernetes.io/projected/af17650f-233a-479a-9152-082ca02f5cbb-kube-api-access-fjtl7\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966593 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/d74ae43c-85c3-42b6-956b-dca218e79bb0-srv-cert\") pod \"olm-operator-6b444d44fb-4m9s2\" (UID: \"d74ae43c-85c3-42b6-956b-dca218e79bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966606 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhfs8\" (UniqueName: \"kubernetes.io/projected/4c8189e8-b292-48af-8a28-23021c696ba5-kube-api-access-nhfs8\") pod \"control-plane-machine-set-operator-78cbb6b69f-7qcfs\" (UID: \"4c8189e8-b292-48af-8a28-23021c696ba5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7qcfs" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966620 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac45a607-6d21-475e-8f82-ed9bbcbb1f65-config\") pod \"machine-api-operator-5694c8668f-vprnt\" (UID: \"ac45a607-6d21-475e-8f82-ed9bbcbb1f65\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966634 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4srz\" (UniqueName: \"kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-kube-api-access-f4srz\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966649 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rjcn\" (UniqueName: \"kubernetes.io/projected/a55ea739-d4ed-4594-9a3d-aca6728c0618-kube-api-access-5rjcn\") pod \"multus-admission-controller-857f4d67dd-p5szg\" (UID: \"a55ea739-d4ed-4594-9a3d-aca6728c0618\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p5szg" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966665 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8r22\" (UniqueName: \"kubernetes.io/projected/f6aca4cf-35d0-47a4-9476-9a6888f31efb-kube-api-access-w8r22\") pod \"openshift-config-operator-7777fb866f-trgkr\" (UID: \"f6aca4cf-35d0-47a4-9476-9a6888f31efb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966681 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966694 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71fcfb8e-5a8a-44fe-9933-cee4d8c0012c-config\") pod \"console-operator-58897d9998-cwk8q\" (UID: \"71fcfb8e-5a8a-44fe-9933-cee4d8c0012c\") " pod="openshift-console-operator/console-operator-58897d9998-cwk8q" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966708 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d6415e96-60f9-4d4a-8476-c39c404ba62d-bound-sa-token\") pod \"ingress-operator-5b745b69d9-69qr9\" (UID: \"d6415e96-60f9-4d4a-8476-c39c404ba62d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966727 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f84bd52b-3da4-47a1-a4fc-1c72fa27d846-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-dg7cl\" (UID: \"f84bd52b-3da4-47a1-a4fc-1c72fa27d846\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966741 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6aca4cf-35d0-47a4-9476-9a6888f31efb-serving-cert\") pod \"openshift-config-operator-7777fb866f-trgkr\" (UID: \"f6aca4cf-35d0-47a4-9476-9a6888f31efb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966754 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d74ae43c-85c3-42b6-956b-dca218e79bb0-profile-collector-cert\") pod \"olm-operator-6b444d44fb-4m9s2\" (UID: \"d74ae43c-85c3-42b6-956b-dca218e79bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966769 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e202504f-0b8b-4709-b211-031ebf81cc77-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966782 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/867b2e21-3905-4d08-b96c-e23c8240d93d-serving-cert\") pod \"route-controller-manager-6576b87f9c-djf82\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966798 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/500dd093-4153-433e-b01b-fd8dfa5622f6-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8vgv8\" (UID: \"500dd093-4153-433e-b01b-fd8dfa5622f6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966812 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5012bed8-6f9e-47b8-9f71-5eff34c9d997-installation-pull-secrets\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966840 4622 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e202504f-0b8b-4709-b211-031ebf81cc77-audit\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966854 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-oauth-config\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966867 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/500dd093-4153-433e-b01b-fd8dfa5622f6-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8vgv8\" (UID: \"500dd093-4153-433e-b01b-fd8dfa5622f6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966886 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtd4w\" (UniqueName: \"kubernetes.io/projected/867b2e21-3905-4d08-b96c-e23c8240d93d-kube-api-access-xtd4w\") pod \"route-controller-manager-6576b87f9c-djf82\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966901 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/4c8189e8-b292-48af-8a28-23021c696ba5-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7qcfs\" (UID: \"4c8189e8-b292-48af-8a28-23021c696ba5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7qcfs" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966915 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-service-ca\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966928 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e202504f-0b8b-4709-b211-031ebf81cc77-node-pullsecrets\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966940 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e202504f-0b8b-4709-b211-031ebf81cc77-encryption-config\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966952 4622 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-audit-policies\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966965 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e202504f-0b8b-4709-b211-031ebf81cc77-config\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966979 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjfll\" (UniqueName: \"kubernetes.io/projected/d74ae43c-85c3-42b6-956b-dca218e79bb0-kube-api-access-bjfll\") pod \"olm-operator-6b444d44fb-4m9s2\" (UID: \"d74ae43c-85c3-42b6-956b-dca218e79bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.966994 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e202504f-0b8b-4709-b211-031ebf81cc77-etcd-client\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.967007 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/867b2e21-3905-4d08-b96c-e23c8240d93d-client-ca\") pod \"route-controller-manager-6576b87f9c-djf82\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.967020 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcw9t\" (UniqueName: \"kubernetes.io/projected/ac45a607-6d21-475e-8f82-ed9bbcbb1f65-kube-api-access-xcw9t\") pod \"machine-api-operator-5694c8668f-vprnt\" (UID: \"ac45a607-6d21-475e-8f82-ed9bbcbb1f65\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.967043 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp7bc\" (UniqueName: \"kubernetes.io/projected/71fcfb8e-5a8a-44fe-9933-cee4d8c0012c-kube-api-access-tp7bc\") pod \"console-operator-58897d9998-cwk8q\" (UID: \"71fcfb8e-5a8a-44fe-9933-cee4d8c0012c\") " pod="openshift-console-operator/console-operator-58897d9998-cwk8q" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.967057 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-trusted-ca-bundle\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.967071 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/e202504f-0b8b-4709-b211-031ebf81cc77-audit-dir\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.967085 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/867b2e21-3905-4d08-b96c-e23c8240d93d-config\") pod \"route-controller-manager-6576b87f9c-djf82\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.967099 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5012bed8-6f9e-47b8-9f71-5eff34c9d997-ca-trust-extracted\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.967114 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.967135 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-serving-cert\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.967148 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqhdw\" (UniqueName: \"kubernetes.io/projected/f84bd52b-3da4-47a1-a4fc-1c72fa27d846-kube-api-access-hqhdw\") pod \"openshift-controller-manager-operator-756b6f6bc6-dg7cl\" (UID: \"f84bd52b-3da4-47a1-a4fc-1c72fa27d846\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.967293 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xm2lm\" (UniqueName: \"kubernetes.io/projected/e202504f-0b8b-4709-b211-031ebf81cc77-kube-api-access-xm2lm\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.967369 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.967427 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/ad723c1b-c1ad-40e9-9527-a177e0f01117-config\") pod \"kube-controller-manager-operator-78b949d7b-bwskx\" (UID: \"ad723c1b-c1ad-40e9-9527-a177e0f01117\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx" Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.967460 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99g8l\" (UniqueName: \"kubernetes.io/projected/d6415e96-60f9-4d4a-8476-c39c404ba62d-kube-api-access-99g8l\") pod \"ingress-operator-5b745b69d9-69qr9\" (UID: \"d6415e96-60f9-4d4a-8476-c39c404ba62d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" Nov 26 11:12:56 crc kubenswrapper[4622]: E1126 11:12:56.967864 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:12:57.467849021 +0000 UTC m=+137.059060543 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:56 crc kubenswrapper[4622]: I1126 11:12:56.980857 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-mq7t5"] Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.000220 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.005718 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.012555 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.020488 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-zgbwd"] Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068244 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068443 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cee0e42c-d907-4529-892f-ee830f463490-metrics-tls\") pod \"dns-default-9vzz6\" (UID: \"cee0e42c-d907-4529-892f-ee830f463490\") " pod="openshift-dns/dns-default-9vzz6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068481 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-registry-tls\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068522 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fda35c7d-d0bc-4730-b21d-a66ff3b0f62e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-645bl\" (UID: \"fda35c7d-d0bc-4730-b21d-a66ff3b0f62e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068538 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-config\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068556 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e202504f-0b8b-4709-b211-031ebf81cc77-serving-cert\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068573 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4z4vz\" (UniqueName: \"kubernetes.io/projected/3db268a7-30fd-4031-8eba-72d60056bbbd-kube-api-access-4z4vz\") pod \"collect-profiles-29402580-2wnqz\" (UID: \"3db268a7-30fd-4031-8eba-72d60056bbbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068593 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f6aca4cf-35d0-47a4-9476-9a6888f31efb-available-featuregates\") pod \"openshift-config-operator-7777fb866f-trgkr\" (UID: \"f6aca4cf-35d0-47a4-9476-9a6888f31efb\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068608 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ac45a607-6d21-475e-8f82-ed9bbcbb1f65-images\") pod \"machine-api-operator-5694c8668f-vprnt\" (UID: \"ac45a607-6d21-475e-8f82-ed9bbcbb1f65\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068628 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5012bed8-6f9e-47b8-9f71-5eff34c9d997-trusted-ca\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068643 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f84bd52b-3da4-47a1-a4fc-1c72fa27d846-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-dg7cl\" (UID: \"f84bd52b-3da4-47a1-a4fc-1c72fa27d846\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068657 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e202504f-0b8b-4709-b211-031ebf81cc77-image-import-ca\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068688 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcnhp\" (UniqueName: \"kubernetes.io/projected/cee0e42c-d907-4529-892f-ee830f463490-kube-api-access-gcnhp\") pod \"dns-default-9vzz6\" (UID: \"cee0e42c-d907-4529-892f-ee830f463490\") " pod="openshift-dns/dns-default-9vzz6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068706 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npkhw\" (UniqueName: \"kubernetes.io/projected/731a82fb-93f9-42df-8dc3-7e0e0937b5a9-kube-api-access-npkhw\") pod \"service-ca-9c57cc56f-4x7nf\" (UID: \"731a82fb-93f9-42df-8dc3-7e0e0937b5a9\") " pod="openshift-service-ca/service-ca-9c57cc56f-4x7nf" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068725 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgmq7\" (UniqueName: \"kubernetes.io/projected/c58a0d86-56ac-4aaa-b2c9-995c925cd839-kube-api-access-tgmq7\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068739 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ad723c1b-c1ad-40e9-9527-a177e0f01117-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bwskx\" (UID: \"ad723c1b-c1ad-40e9-9527-a177e0f01117\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068757 4622 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71fcfb8e-5a8a-44fe-9933-cee4d8c0012c-serving-cert\") pod \"console-operator-58897d9998-cwk8q\" (UID: \"71fcfb8e-5a8a-44fe-9933-cee4d8c0012c\") " pod="openshift-console-operator/console-operator-58897d9998-cwk8q" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068773 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/8ef8cb79-5fb6-4438-bc49-4a88a304557d-mountpoint-dir\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068787 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/8ef8cb79-5fb6-4438-bc49-4a88a304557d-registration-dir\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068803 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mzhl\" (UniqueName: \"kubernetes.io/projected/05f00222-b862-4024-a903-c006f34852fb-kube-api-access-7mzhl\") pod \"controller-manager-879f6c89f-g675c\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068819 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3db268a7-30fd-4031-8eba-72d60056bbbd-secret-volume\") pod \"collect-profiles-29402580-2wnqz\" (UID: \"3db268a7-30fd-4031-8eba-72d60056bbbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068854 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/af17650f-233a-479a-9152-082ca02f5cbb-audit-dir\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068869 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b-serving-cert\") pod \"service-ca-operator-777779d784-98snv\" (UID: \"2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98snv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068883 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vjkt\" (UniqueName: \"kubernetes.io/projected/83f652a7-5618-40ee-8196-ddd4adba4175-kube-api-access-4vjkt\") pod \"package-server-manager-789f6589d5-zf22z\" (UID: \"83f652a7-5618-40ee-8196-ddd4adba4175\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068897 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: 
\"kubernetes.io/host-path/8ef8cb79-5fb6-4438-bc49-4a88a304557d-socket-dir\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068920 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068935 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4rsv\" (UniqueName: \"kubernetes.io/projected/3f6822df-f9db-480d-bf69-58c39f017ccc-kube-api-access-w4rsv\") pod \"cluster-samples-operator-665b6dd947-9d7nf\" (UID: \"3f6822df-f9db-480d-bf69-58c39f017ccc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9d7nf" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068952 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/500dd093-4153-433e-b01b-fd8dfa5622f6-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8vgv8\" (UID: \"500dd093-4153-433e-b01b-fd8dfa5622f6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068968 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cee0e42c-d907-4529-892f-ee830f463490-config-volume\") pod \"dns-default-9vzz6\" (UID: \"cee0e42c-d907-4529-892f-ee830f463490\") " pod="openshift-dns/dns-default-9vzz6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068984 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4f4e9e05-70fe-443d-9747-ea0849db83d4-proxy-tls\") pod \"machine-config-operator-74547568cd-w4wmd\" (UID: \"4f4e9e05-70fe-443d-9747-ea0849db83d4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.068998 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e202504f-0b8b-4709-b211-031ebf81cc77-etcd-serving-ca\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069012 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069025 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/83f652a7-5618-40ee-8196-ddd4adba4175-package-server-manager-serving-cert\") pod 
\"package-server-manager-789f6589d5-zf22z\" (UID: \"83f652a7-5618-40ee-8196-ddd4adba4175\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069050 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069064 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069090 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-client-ca\") pod \"controller-manager-879f6c89f-g675c\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069105 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-g675c\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069122 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a55ea739-d4ed-4594-9a3d-aca6728c0618-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-p5szg\" (UID: \"a55ea739-d4ed-4594-9a3d-aca6728c0618\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p5szg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069142 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcksl\" (UniqueName: \"kubernetes.io/projected/500dd093-4153-433e-b01b-fd8dfa5622f6-kube-api-access-kcksl\") pod \"cluster-image-registry-operator-dc59b4c8b-8vgv8\" (UID: \"500dd093-4153-433e-b01b-fd8dfa5622f6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069157 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069174 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxff6\" (UniqueName: \"kubernetes.io/projected/1e710e37-8d2a-43f6-9e8f-7eb3233eb276-kube-api-access-jxff6\") 
pod \"router-default-5444994796-w7zs4\" (UID: \"1e710e37-8d2a-43f6-9e8f-7eb3233eb276\") " pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069196 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5012bed8-6f9e-47b8-9f71-5eff34c9d997-registry-certificates\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069211 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vzgf\" (UniqueName: \"kubernetes.io/projected/8ef8cb79-5fb6-4438-bc49-4a88a304557d-kube-api-access-2vzgf\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069227 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4df4382-b969-46c2-b5f7-61631a2bfc06-config\") pod \"authentication-operator-69f744f599-p78g4\" (UID: \"b4df4382-b969-46c2-b5f7-61631a2bfc06\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069242 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fda35c7d-d0bc-4730-b21d-a66ff3b0f62e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-645bl\" (UID: \"fda35c7d-d0bc-4730-b21d-a66ff3b0f62e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069256 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d6415e96-60f9-4d4a-8476-c39c404ba62d-metrics-tls\") pod \"ingress-operator-5b745b69d9-69qr9\" (UID: \"d6415e96-60f9-4d4a-8476-c39c404ba62d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069270 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmphn\" (UniqueName: \"kubernetes.io/projected/b4df4382-b969-46c2-b5f7-61631a2bfc06-kube-api-access-gmphn\") pod \"authentication-operator-69f744f599-p78g4\" (UID: \"b4df4382-b969-46c2-b5f7-61631a2bfc06\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069285 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069298 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/731a82fb-93f9-42df-8dc3-7e0e0937b5a9-signing-key\") pod \"service-ca-9c57cc56f-4x7nf\" (UID: \"731a82fb-93f9-42df-8dc3-7e0e0937b5a9\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-4x7nf" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069312 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fda35c7d-d0bc-4730-b21d-a66ff3b0f62e-config\") pod \"kube-apiserver-operator-766d6c64bb-645bl\" (UID: \"fda35c7d-d0bc-4730-b21d-a66ff3b0f62e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069326 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1e710e37-8d2a-43f6-9e8f-7eb3233eb276-default-certificate\") pod \"router-default-5444994796-w7zs4\" (UID: \"1e710e37-8d2a-43f6-9e8f-7eb3233eb276\") " pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069366 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/8ef8cb79-5fb6-4438-bc49-4a88a304557d-csi-data-dir\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069390 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-bound-sa-token\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069404 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g8n9\" (UniqueName: \"kubernetes.io/projected/2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b-kube-api-access-2g8n9\") pod \"service-ca-operator-777779d784-98snv\" (UID: \"2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98snv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069426 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ad723c1b-c1ad-40e9-9527-a177e0f01117-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bwskx\" (UID: \"ad723c1b-c1ad-40e9-9527-a177e0f01117\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069441 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ac45a607-6d21-475e-8f82-ed9bbcbb1f65-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vprnt\" (UID: \"ac45a607-6d21-475e-8f82-ed9bbcbb1f65\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069454 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d6415e96-60f9-4d4a-8476-c39c404ba62d-trusted-ca\") pod \"ingress-operator-5b745b69d9-69qr9\" (UID: \"d6415e96-60f9-4d4a-8476-c39c404ba62d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069468 4622 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069519 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3f6822df-f9db-480d-bf69-58c39f017ccc-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-9d7nf\" (UID: \"3f6822df-f9db-480d-bf69-58c39f017ccc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9d7nf" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069534 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4f4e9e05-70fe-443d-9747-ea0849db83d4-images\") pod \"machine-config-operator-74547568cd-w4wmd\" (UID: \"4f4e9e05-70fe-443d-9747-ea0849db83d4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069566 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069581 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qxdt\" (UniqueName: \"kubernetes.io/projected/ca03ec70-790c-40c8-a009-dfa4946f6d06-kube-api-access-8qxdt\") pod \"migrator-59844c95c7-tq627\" (UID: \"ca03ec70-790c-40c8-a009-dfa4946f6d06\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tq627" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069596 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/8c07900a-9014-44c3-b2a4-358a5a50dc09-certs\") pod \"machine-config-server-n8mbn\" (UID: \"8c07900a-9014-44c3-b2a4-358a5a50dc09\") " pod="openshift-machine-config-operator/machine-config-server-n8mbn" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.069618 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.071309 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fda35c7d-d0bc-4730-b21d-a66ff3b0f62e-config\") pod \"kube-apiserver-operator-766d6c64bb-645bl\" (UID: \"fda35c7d-d0bc-4730-b21d-a66ff3b0f62e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.071320 4622 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/af17650f-233a-479a-9152-082ca02f5cbb-audit-dir\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.071822 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072084 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjtl7\" (UniqueName: \"kubernetes.io/projected/af17650f-233a-479a-9152-082ca02f5cbb-kube-api-access-fjtl7\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072107 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d74ae43c-85c3-42b6-956b-dca218e79bb0-srv-cert\") pod \"olm-operator-6b444d44fb-4m9s2\" (UID: \"d74ae43c-85c3-42b6-956b-dca218e79bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072138 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhfs8\" (UniqueName: \"kubernetes.io/projected/4c8189e8-b292-48af-8a28-23021c696ba5-kube-api-access-nhfs8\") pod \"control-plane-machine-set-operator-78cbb6b69f-7qcfs\" (UID: \"4c8189e8-b292-48af-8a28-23021c696ba5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7qcfs" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072165 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac45a607-6d21-475e-8f82-ed9bbcbb1f65-config\") pod \"machine-api-operator-5694c8668f-vprnt\" (UID: \"ac45a607-6d21-475e-8f82-ed9bbcbb1f65\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072182 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4srz\" (UniqueName: \"kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-kube-api-access-f4srz\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072223 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rjcn\" (UniqueName: \"kubernetes.io/projected/a55ea739-d4ed-4594-9a3d-aca6728c0618-kube-api-access-5rjcn\") pod \"multus-admission-controller-857f4d67dd-p5szg\" (UID: \"a55ea739-d4ed-4594-9a3d-aca6728c0618\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p5szg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072238 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8r22\" (UniqueName: 
\"kubernetes.io/projected/f6aca4cf-35d0-47a4-9476-9a6888f31efb-kube-api-access-w8r22\") pod \"openshift-config-operator-7777fb866f-trgkr\" (UID: \"f6aca4cf-35d0-47a4-9476-9a6888f31efb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072255 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4f4e9e05-70fe-443d-9747-ea0849db83d4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-w4wmd\" (UID: \"4f4e9e05-70fe-443d-9747-ea0849db83d4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072282 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/8ef8cb79-5fb6-4438-bc49-4a88a304557d-plugins-dir\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072298 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072312 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71fcfb8e-5a8a-44fe-9933-cee4d8c0012c-config\") pod \"console-operator-58897d9998-cwk8q\" (UID: \"71fcfb8e-5a8a-44fe-9933-cee4d8c0012c\") " pod="openshift-console-operator/console-operator-58897d9998-cwk8q" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072328 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d6415e96-60f9-4d4a-8476-c39c404ba62d-bound-sa-token\") pod \"ingress-operator-5b745b69d9-69qr9\" (UID: \"d6415e96-60f9-4d4a-8476-c39c404ba62d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072363 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f84bd52b-3da4-47a1-a4fc-1c72fa27d846-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-dg7cl\" (UID: \"f84bd52b-3da4-47a1-a4fc-1c72fa27d846\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072377 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6aca4cf-35d0-47a4-9476-9a6888f31efb-serving-cert\") pod \"openshift-config-operator-7777fb866f-trgkr\" (UID: \"f6aca4cf-35d0-47a4-9476-9a6888f31efb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072393 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d74ae43c-85c3-42b6-956b-dca218e79bb0-profile-collector-cert\") pod 
\"olm-operator-6b444d44fb-4m9s2\" (UID: \"d74ae43c-85c3-42b6-956b-dca218e79bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072416 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/867b2e21-3905-4d08-b96c-e23c8240d93d-serving-cert\") pod \"route-controller-manager-6576b87f9c-djf82\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072439 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e202504f-0b8b-4709-b211-031ebf81cc77-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072462 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/500dd093-4153-433e-b01b-fd8dfa5622f6-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8vgv8\" (UID: \"500dd093-4153-433e-b01b-fd8dfa5622f6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072477 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1e710e37-8d2a-43f6-9e8f-7eb3233eb276-stats-auth\") pod \"router-default-5444994796-w7zs4\" (UID: \"1e710e37-8d2a-43f6-9e8f-7eb3233eb276\") " pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072491 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4df4382-b969-46c2-b5f7-61631a2bfc06-serving-cert\") pod \"authentication-operator-69f744f599-p78g4\" (UID: \"b4df4382-b969-46c2-b5f7-61631a2bfc06\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072532 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5012bed8-6f9e-47b8-9f71-5eff34c9d997-installation-pull-secrets\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072557 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e202504f-0b8b-4709-b211-031ebf81cc77-audit\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072572 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7dae3565-6d26-4f06-a148-66466f9ec9bb-apiservice-cert\") pod \"packageserver-d55dfcdfc-x7ccv\" (UID: \"7dae3565-6d26-4f06-a148-66466f9ec9bb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" Nov 26 11:12:57 crc 
kubenswrapper[4622]: I1126 11:12:57.072612 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b-config\") pod \"service-ca-operator-777779d784-98snv\" (UID: \"2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98snv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072627 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05f00222-b862-4024-a903-c006f34852fb-serving-cert\") pod \"controller-manager-879f6c89f-g675c\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072641 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3db268a7-30fd-4031-8eba-72d60056bbbd-config-volume\") pod \"collect-profiles-29402580-2wnqz\" (UID: \"3db268a7-30fd-4031-8eba-72d60056bbbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072655 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/731a82fb-93f9-42df-8dc3-7e0e0937b5a9-signing-cabundle\") pod \"service-ca-9c57cc56f-4x7nf\" (UID: \"731a82fb-93f9-42df-8dc3-7e0e0937b5a9\") " pod="openshift-service-ca/service-ca-9c57cc56f-4x7nf" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072672 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-oauth-config\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072688 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/500dd093-4153-433e-b01b-fd8dfa5622f6-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8vgv8\" (UID: \"500dd093-4153-433e-b01b-fd8dfa5622f6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072709 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-config\") pod \"controller-manager-879f6c89f-g675c\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072724 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtd4w\" (UniqueName: \"kubernetes.io/projected/867b2e21-3905-4d08-b96c-e23c8240d93d-kube-api-access-xtd4w\") pod \"route-controller-manager-6576b87f9c-djf82\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072740 4622 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4df4382-b969-46c2-b5f7-61631a2bfc06-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-p78g4\" (UID: \"b4df4382-b969-46c2-b5f7-61631a2bfc06\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072765 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km2n8\" (UniqueName: \"kubernetes.io/projected/7dae3565-6d26-4f06-a148-66466f9ec9bb-kube-api-access-km2n8\") pod \"packageserver-d55dfcdfc-x7ccv\" (UID: \"7dae3565-6d26-4f06-a148-66466f9ec9bb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072790 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e202504f-0b8b-4709-b211-031ebf81cc77-encryption-config\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072806 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-audit-policies\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072822 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/4c8189e8-b292-48af-8a28-23021c696ba5-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7qcfs\" (UID: \"4c8189e8-b292-48af-8a28-23021c696ba5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7qcfs" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072839 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-service-ca\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072853 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e202504f-0b8b-4709-b211-031ebf81cc77-node-pullsecrets\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072867 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e202504f-0b8b-4709-b211-031ebf81cc77-config\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072901 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjfll\" (UniqueName: \"kubernetes.io/projected/d74ae43c-85c3-42b6-956b-dca218e79bb0-kube-api-access-bjfll\") pod 
\"olm-operator-6b444d44fb-4m9s2\" (UID: \"d74ae43c-85c3-42b6-956b-dca218e79bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072915 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e202504f-0b8b-4709-b211-031ebf81cc77-etcd-client\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072929 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/7dae3565-6d26-4f06-a148-66466f9ec9bb-tmpfs\") pod \"packageserver-d55dfcdfc-x7ccv\" (UID: \"7dae3565-6d26-4f06-a148-66466f9ec9bb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072954 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/867b2e21-3905-4d08-b96c-e23c8240d93d-client-ca\") pod \"route-controller-manager-6576b87f9c-djf82\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072968 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcw9t\" (UniqueName: \"kubernetes.io/projected/ac45a607-6d21-475e-8f82-ed9bbcbb1f65-kube-api-access-xcw9t\") pod \"machine-api-operator-5694c8668f-vprnt\" (UID: \"ac45a607-6d21-475e-8f82-ed9bbcbb1f65\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072984 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp7bc\" (UniqueName: \"kubernetes.io/projected/71fcfb8e-5a8a-44fe-9933-cee4d8c0012c-kube-api-access-tp7bc\") pod \"console-operator-58897d9998-cwk8q\" (UID: \"71fcfb8e-5a8a-44fe-9933-cee4d8c0012c\") " pod="openshift-console-operator/console-operator-58897d9998-cwk8q" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.072998 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1e710e37-8d2a-43f6-9e8f-7eb3233eb276-service-ca-bundle\") pod \"router-default-5444994796-w7zs4\" (UID: \"1e710e37-8d2a-43f6-9e8f-7eb3233eb276\") " pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.073013 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-trusted-ca-bundle\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.073027 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e202504f-0b8b-4709-b211-031ebf81cc77-audit-dir\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: E1126 11:12:57.073607 4622 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:12:57.573593718 +0000 UTC m=+137.164805240 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.074676 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac45a607-6d21-475e-8f82-ed9bbcbb1f65-config\") pod \"machine-api-operator-5694c8668f-vprnt\" (UID: \"ac45a607-6d21-475e-8f82-ed9bbcbb1f65\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.074844 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.074925 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e202504f-0b8b-4709-b211-031ebf81cc77-audit-dir\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.075123 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fda35c7d-d0bc-4730-b21d-a66ff3b0f62e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-645bl\" (UID: \"fda35c7d-d0bc-4730-b21d-a66ff3b0f62e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.075623 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-trusted-ca-bundle\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.075693 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f84bd52b-3da4-47a1-a4fc-1c72fa27d846-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-dg7cl\" (UID: \"f84bd52b-3da4-47a1-a4fc-1c72fa27d846\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.075880 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-router-certs\") pod 
\"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.076408 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.076458 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-config\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.076903 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d74ae43c-85c3-42b6-956b-dca218e79bb0-srv-cert\") pod \"olm-operator-6b444d44fb-4m9s2\" (UID: \"d74ae43c-85c3-42b6-956b-dca218e79bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.076904 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-registry-tls\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.077052 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71fcfb8e-5a8a-44fe-9933-cee4d8c0012c-config\") pod \"console-operator-58897d9998-cwk8q\" (UID: \"71fcfb8e-5a8a-44fe-9933-cee4d8c0012c\") " pod="openshift-console-operator/console-operator-58897d9998-cwk8q" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.077229 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a55ea739-d4ed-4594-9a3d-aca6728c0618-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-p5szg\" (UID: \"a55ea739-d4ed-4594-9a3d-aca6728c0618\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p5szg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.077239 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3f6822df-f9db-480d-bf69-58c39f017ccc-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-9d7nf\" (UID: \"3f6822df-f9db-480d-bf69-58c39f017ccc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9d7nf" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.077465 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e202504f-0b8b-4709-b211-031ebf81cc77-etcd-serving-ca\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.077641 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" 
(UniqueName: \"kubernetes.io/host-path/e202504f-0b8b-4709-b211-031ebf81cc77-node-pullsecrets\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.078131 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e202504f-0b8b-4709-b211-031ebf81cc77-config\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.078512 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f6aca4cf-35d0-47a4-9476-9a6888f31efb-available-featuregates\") pod \"openshift-config-operator-7777fb866f-trgkr\" (UID: \"f6aca4cf-35d0-47a4-9476-9a6888f31efb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.078738 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d6415e96-60f9-4d4a-8476-c39c404ba62d-trusted-ca\") pod \"ingress-operator-5b745b69d9-69qr9\" (UID: \"d6415e96-60f9-4d4a-8476-c39c404ba62d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.078891 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.078938 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/867b2e21-3905-4d08-b96c-e23c8240d93d-config\") pod \"route-controller-manager-6576b87f9c-djf82\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.078966 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw6xn\" (UniqueName: \"kubernetes.io/projected/b144a741-5958-497e-9b68-8e569bfca541-kube-api-access-gw6xn\") pod \"ingress-canary-swz8p\" (UID: \"b144a741-5958-497e-9b68-8e569bfca541\") " pod="openshift-ingress-canary/ingress-canary-swz8p" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079001 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5012bed8-6f9e-47b8-9f71-5eff34c9d997-ca-trust-extracted\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079020 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079051 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079056 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-serving-cert\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079199 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqhdw\" (UniqueName: \"kubernetes.io/projected/f84bd52b-3da4-47a1-a4fc-1c72fa27d846-kube-api-access-hqhdw\") pod \"openshift-controller-manager-operator-756b6f6bc6-dg7cl\" (UID: \"f84bd52b-3da4-47a1-a4fc-1c72fa27d846\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079222 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079231 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ac45a607-6d21-475e-8f82-ed9bbcbb1f65-images\") pod \"machine-api-operator-5694c8668f-vprnt\" (UID: \"ac45a607-6d21-475e-8f82-ed9bbcbb1f65\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079224 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7dae3565-6d26-4f06-a148-66466f9ec9bb-webhook-cert\") pod \"packageserver-d55dfcdfc-x7ccv\" (UID: \"7dae3565-6d26-4f06-a148-66466f9ec9bb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079269 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1e710e37-8d2a-43f6-9e8f-7eb3233eb276-metrics-certs\") pod \"router-default-5444994796-w7zs4\" (UID: \"1e710e37-8d2a-43f6-9e8f-7eb3233eb276\") " pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079306 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-db5d2\" (UniqueName: \"kubernetes.io/projected/63b1178e-6e77-4714-96eb-7dc978789aaa-kube-api-access-db5d2\") pod \"downloads-7954f5f757-5lz6m\" (UID: \"63b1178e-6e77-4714-96eb-7dc978789aaa\") " pod="openshift-console/downloads-7954f5f757-5lz6m" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079328 
4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xm2lm\" (UniqueName: \"kubernetes.io/projected/e202504f-0b8b-4709-b211-031ebf81cc77-kube-api-access-xm2lm\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079371 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfgdl\" (UniqueName: \"kubernetes.io/projected/4f4e9e05-70fe-443d-9747-ea0849db83d4-kube-api-access-tfgdl\") pod \"machine-config-operator-74547568cd-w4wmd\" (UID: \"4f4e9e05-70fe-443d-9747-ea0849db83d4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079378 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5012bed8-6f9e-47b8-9f71-5eff34c9d997-registry-certificates\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079389 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/8c07900a-9014-44c3-b2a4-358a5a50dc09-node-bootstrap-token\") pod \"machine-config-server-n8mbn\" (UID: \"8c07900a-9014-44c3-b2a4-358a5a50dc09\") " pod="openshift-machine-config-operator/machine-config-server-n8mbn" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079406 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4df4382-b969-46c2-b5f7-61631a2bfc06-service-ca-bundle\") pod \"authentication-operator-69f744f599-p78g4\" (UID: \"b4df4382-b969-46c2-b5f7-61631a2bfc06\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079430 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079450 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad723c1b-c1ad-40e9-9527-a177e0f01117-config\") pod \"kube-controller-manager-operator-78b949d7b-bwskx\" (UID: \"ad723c1b-c1ad-40e9-9527-a177e0f01117\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079466 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b144a741-5958-497e-9b68-8e569bfca541-cert\") pod \"ingress-canary-swz8p\" (UID: \"b144a741-5958-497e-9b68-8e569bfca541\") " pod="openshift-ingress-canary/ingress-canary-swz8p" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079589 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-99g8l\" (UniqueName: \"kubernetes.io/projected/d6415e96-60f9-4d4a-8476-c39c404ba62d-kube-api-access-99g8l\") pod \"ingress-operator-5b745b69d9-69qr9\" (UID: \"d6415e96-60f9-4d4a-8476-c39c404ba62d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079622 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71fcfb8e-5a8a-44fe-9933-cee4d8c0012c-trusted-ca\") pod \"console-operator-58897d9998-cwk8q\" (UID: \"71fcfb8e-5a8a-44fe-9933-cee4d8c0012c\") " pod="openshift-console-operator/console-operator-58897d9998-cwk8q" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079641 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-oauth-serving-cert\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079658 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxsdk\" (UniqueName: \"kubernetes.io/projected/8c07900a-9014-44c3-b2a4-358a5a50dc09-kube-api-access-qxsdk\") pod \"machine-config-server-n8mbn\" (UID: \"8c07900a-9014-44c3-b2a4-358a5a50dc09\") " pod="openshift-machine-config-operator/machine-config-server-n8mbn" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.079776 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e202504f-0b8b-4709-b211-031ebf81cc77-serving-cert\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.080168 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-oauth-config\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.080294 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5012bed8-6f9e-47b8-9f71-5eff34c9d997-trusted-ca\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.080626 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ad723c1b-c1ad-40e9-9527-a177e0f01117-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bwskx\" (UID: \"ad723c1b-c1ad-40e9-9527-a177e0f01117\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.080702 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d6415e96-60f9-4d4a-8476-c39c404ba62d-metrics-tls\") pod \"ingress-operator-5b745b69d9-69qr9\" (UID: \"d6415e96-60f9-4d4a-8476-c39c404ba62d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" Nov 26 
11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.080807 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.080933 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-audit-policies\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.080961 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/867b2e21-3905-4d08-b96c-e23c8240d93d-client-ca\") pod \"route-controller-manager-6576b87f9c-djf82\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.081132 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/867b2e21-3905-4d08-b96c-e23c8240d93d-config\") pod \"route-controller-manager-6576b87f9c-djf82\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:12:57 crc kubenswrapper[4622]: E1126 11:12:57.081324 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:12:57.581313174 +0000 UTC m=+137.172524697 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.082921 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e202504f-0b8b-4709-b211-031ebf81cc77-etcd-client\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.085120 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6aca4cf-35d0-47a4-9476-9a6888f31efb-serving-cert\") pod \"openshift-config-operator-7777fb866f-trgkr\" (UID: \"f6aca4cf-35d0-47a4-9476-9a6888f31efb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.085239 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/500dd093-4153-433e-b01b-fd8dfa5622f6-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8vgv8\" (UID: \"500dd093-4153-433e-b01b-fd8dfa5622f6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.085405 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-serving-cert\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.085455 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.085991 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5012bed8-6f9e-47b8-9f71-5eff34c9d997-ca-trust-extracted\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.088631 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.088711 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/d74ae43c-85c3-42b6-956b-dca218e79bb0-profile-collector-cert\") pod \"olm-operator-6b444d44fb-4m9s2\" (UID: \"d74ae43c-85c3-42b6-956b-dca218e79bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.088912 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/4c8189e8-b292-48af-8a28-23021c696ba5-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7qcfs\" (UID: \"4c8189e8-b292-48af-8a28-23021c696ba5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7qcfs" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.089696 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-service-ca\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.089754 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e202504f-0b8b-4709-b211-031ebf81cc77-image-import-ca\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.089858 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-oauth-serving-cert\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.090238 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad723c1b-c1ad-40e9-9527-a177e0f01117-config\") pod \"kube-controller-manager-operator-78b949d7b-bwskx\" (UID: \"ad723c1b-c1ad-40e9-9527-a177e0f01117\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.090264 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ac45a607-6d21-475e-8f82-ed9bbcbb1f65-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vprnt\" (UID: \"ac45a607-6d21-475e-8f82-ed9bbcbb1f65\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.090327 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71fcfb8e-5a8a-44fe-9933-cee4d8c0012c-serving-cert\") pod \"console-operator-58897d9998-cwk8q\" (UID: \"71fcfb8e-5a8a-44fe-9933-cee4d8c0012c\") " pod="openshift-console-operator/console-operator-58897d9998-cwk8q" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.091986 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5012bed8-6f9e-47b8-9f71-5eff34c9d997-installation-pull-secrets\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.092197 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/867b2e21-3905-4d08-b96c-e23c8240d93d-serving-cert\") pod \"route-controller-manager-6576b87f9c-djf82\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.093797 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71fcfb8e-5a8a-44fe-9933-cee4d8c0012c-trusted-ca\") pod \"console-operator-58897d9998-cwk8q\" (UID: \"71fcfb8e-5a8a-44fe-9933-cee4d8c0012c\") " pod="openshift-console-operator/console-operator-58897d9998-cwk8q" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.095260 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e202504f-0b8b-4709-b211-031ebf81cc77-audit\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.095496 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e202504f-0b8b-4709-b211-031ebf81cc77-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.096135 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e202504f-0b8b-4709-b211-031ebf81cc77-encryption-config\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.096212 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.098394 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f84bd52b-3da4-47a1-a4fc-1c72fa27d846-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-dg7cl\" (UID: \"f84bd52b-3da4-47a1-a4fc-1c72fa27d846\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.100259 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/500dd093-4153-433e-b01b-fd8dfa5622f6-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8vgv8\" (UID: \"500dd093-4153-433e-b01b-fd8dfa5622f6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.116273 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhfs8\" 
(UniqueName: \"kubernetes.io/projected/4c8189e8-b292-48af-8a28-23021c696ba5-kube-api-access-nhfs8\") pod \"control-plane-machine-set-operator-78cbb6b69f-7qcfs\" (UID: \"4c8189e8-b292-48af-8a28-23021c696ba5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7qcfs" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.136011 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4srz\" (UniqueName: \"kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-kube-api-access-f4srz\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.141606 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn"] Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.154418 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rjcn\" (UniqueName: \"kubernetes.io/projected/a55ea739-d4ed-4594-9a3d-aca6728c0618-kube-api-access-5rjcn\") pod \"multus-admission-controller-857f4d67dd-p5szg\" (UID: \"a55ea739-d4ed-4594-9a3d-aca6728c0618\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p5szg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.169621 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn" event={"ID":"2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1","Type":"ContainerStarted","Data":"1cf837416df42b010a788d0c7c24319303acebb023f14e2bd447c481a8fc8570"} Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.174581 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz" event={"ID":"7dd22bb8-3b26-4a60-8321-5462640f7816","Type":"ContainerStarted","Data":"be9360a8981228a11f4d79054c8e3cfdd63e3f0022a609b1fba2526994602259"} Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.176697 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" event={"ID":"e9529ef9-5f68-4c96-8266-d5e4fb16f749","Type":"ContainerStarted","Data":"b0b47563b7c18cc16c96766d1370cd97de36e023b140bf84ffe9f3861107978e"} Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.178150 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-zgbwd" event={"ID":"94200fbf-6965-4439-b5f7-8b00787317ea","Type":"ContainerStarted","Data":"5715fabfb8edbec614c4ce19767fe1463300792a94f5f05fb60cda8e48984120"} Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.182585 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.182752 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4f4e9e05-70fe-443d-9747-ea0849db83d4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-w4wmd\" (UID: \"4f4e9e05-70fe-443d-9747-ea0849db83d4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" Nov 26 11:12:57 crc 
kubenswrapper[4622]: I1126 11:12:57.182781 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/8ef8cb79-5fb6-4438-bc49-4a88a304557d-plugins-dir\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.182826 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4df4382-b969-46c2-b5f7-61631a2bfc06-serving-cert\") pod \"authentication-operator-69f744f599-p78g4\" (UID: \"b4df4382-b969-46c2-b5f7-61631a2bfc06\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.182847 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1e710e37-8d2a-43f6-9e8f-7eb3233eb276-stats-auth\") pod \"router-default-5444994796-w7zs4\" (UID: \"1e710e37-8d2a-43f6-9e8f-7eb3233eb276\") " pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.183031 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8r22\" (UniqueName: \"kubernetes.io/projected/f6aca4cf-35d0-47a4-9476-9a6888f31efb-kube-api-access-w8r22\") pod \"openshift-config-operator-7777fb866f-trgkr\" (UID: \"f6aca4cf-35d0-47a4-9476-9a6888f31efb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.183044 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/8ef8cb79-5fb6-4438-bc49-4a88a304557d-plugins-dir\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.183411 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4f4e9e05-70fe-443d-9747-ea0849db83d4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-w4wmd\" (UID: \"4f4e9e05-70fe-443d-9747-ea0849db83d4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" Nov 26 11:12:57 crc kubenswrapper[4622]: E1126 11:12:57.183623 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:12:57.683594174 +0000 UTC m=+137.274805696 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184045 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7dae3565-6d26-4f06-a148-66466f9ec9bb-apiservice-cert\") pod \"packageserver-d55dfcdfc-x7ccv\" (UID: \"7dae3565-6d26-4f06-a148-66466f9ec9bb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184116 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b-config\") pod \"service-ca-operator-777779d784-98snv\" (UID: \"2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98snv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184141 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3db268a7-30fd-4031-8eba-72d60056bbbd-config-volume\") pod \"collect-profiles-29402580-2wnqz\" (UID: \"3db268a7-30fd-4031-8eba-72d60056bbbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184157 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/731a82fb-93f9-42df-8dc3-7e0e0937b5a9-signing-cabundle\") pod \"service-ca-9c57cc56f-4x7nf\" (UID: \"731a82fb-93f9-42df-8dc3-7e0e0937b5a9\") " pod="openshift-service-ca/service-ca-9c57cc56f-4x7nf" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184174 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-config\") pod \"controller-manager-879f6c89f-g675c\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184189 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05f00222-b862-4024-a903-c006f34852fb-serving-cert\") pod \"controller-manager-879f6c89f-g675c\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184206 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4df4382-b969-46c2-b5f7-61631a2bfc06-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-p78g4\" (UID: \"b4df4382-b969-46c2-b5f7-61631a2bfc06\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184238 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km2n8\" (UniqueName: 
\"kubernetes.io/projected/7dae3565-6d26-4f06-a148-66466f9ec9bb-kube-api-access-km2n8\") pod \"packageserver-d55dfcdfc-x7ccv\" (UID: \"7dae3565-6d26-4f06-a148-66466f9ec9bb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184272 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/7dae3565-6d26-4f06-a148-66466f9ec9bb-tmpfs\") pod \"packageserver-d55dfcdfc-x7ccv\" (UID: \"7dae3565-6d26-4f06-a148-66466f9ec9bb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184299 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1e710e37-8d2a-43f6-9e8f-7eb3233eb276-service-ca-bundle\") pod \"router-default-5444994796-w7zs4\" (UID: \"1e710e37-8d2a-43f6-9e8f-7eb3233eb276\") " pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184321 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw6xn\" (UniqueName: \"kubernetes.io/projected/b144a741-5958-497e-9b68-8e569bfca541-kube-api-access-gw6xn\") pod \"ingress-canary-swz8p\" (UID: \"b144a741-5958-497e-9b68-8e569bfca541\") " pod="openshift-ingress-canary/ingress-canary-swz8p" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184365 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7dae3565-6d26-4f06-a148-66466f9ec9bb-webhook-cert\") pod \"packageserver-d55dfcdfc-x7ccv\" (UID: \"7dae3565-6d26-4f06-a148-66466f9ec9bb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184380 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1e710e37-8d2a-43f6-9e8f-7eb3233eb276-metrics-certs\") pod \"router-default-5444994796-w7zs4\" (UID: \"1e710e37-8d2a-43f6-9e8f-7eb3233eb276\") " pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184406 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-db5d2\" (UniqueName: \"kubernetes.io/projected/63b1178e-6e77-4714-96eb-7dc978789aaa-kube-api-access-db5d2\") pod \"downloads-7954f5f757-5lz6m\" (UID: \"63b1178e-6e77-4714-96eb-7dc978789aaa\") " pod="openshift-console/downloads-7954f5f757-5lz6m" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184433 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/8c07900a-9014-44c3-b2a4-358a5a50dc09-node-bootstrap-token\") pod \"machine-config-server-n8mbn\" (UID: \"8c07900a-9014-44c3-b2a4-358a5a50dc09\") " pod="openshift-machine-config-operator/machine-config-server-n8mbn" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184448 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4df4382-b969-46c2-b5f7-61631a2bfc06-service-ca-bundle\") pod \"authentication-operator-69f744f599-p78g4\" (UID: \"b4df4382-b969-46c2-b5f7-61631a2bfc06\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 
crc kubenswrapper[4622]: I1126 11:12:57.184470 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184487 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfgdl\" (UniqueName: \"kubernetes.io/projected/4f4e9e05-70fe-443d-9747-ea0849db83d4-kube-api-access-tfgdl\") pod \"machine-config-operator-74547568cd-w4wmd\" (UID: \"4f4e9e05-70fe-443d-9747-ea0849db83d4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184518 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b144a741-5958-497e-9b68-8e569bfca541-cert\") pod \"ingress-canary-swz8p\" (UID: \"b144a741-5958-497e-9b68-8e569bfca541\") " pod="openshift-ingress-canary/ingress-canary-swz8p" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184545 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxsdk\" (UniqueName: \"kubernetes.io/projected/8c07900a-9014-44c3-b2a4-358a5a50dc09-kube-api-access-qxsdk\") pod \"machine-config-server-n8mbn\" (UID: \"8c07900a-9014-44c3-b2a4-358a5a50dc09\") " pod="openshift-machine-config-operator/machine-config-server-n8mbn" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184565 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cee0e42c-d907-4529-892f-ee830f463490-metrics-tls\") pod \"dns-default-9vzz6\" (UID: \"cee0e42c-d907-4529-892f-ee830f463490\") " pod="openshift-dns/dns-default-9vzz6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184584 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4z4vz\" (UniqueName: \"kubernetes.io/projected/3db268a7-30fd-4031-8eba-72d60056bbbd-kube-api-access-4z4vz\") pod \"collect-profiles-29402580-2wnqz\" (UID: \"3db268a7-30fd-4031-8eba-72d60056bbbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184608 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcnhp\" (UniqueName: \"kubernetes.io/projected/cee0e42c-d907-4529-892f-ee830f463490-kube-api-access-gcnhp\") pod \"dns-default-9vzz6\" (UID: \"cee0e42c-d907-4529-892f-ee830f463490\") " pod="openshift-dns/dns-default-9vzz6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184627 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npkhw\" (UniqueName: \"kubernetes.io/projected/731a82fb-93f9-42df-8dc3-7e0e0937b5a9-kube-api-access-npkhw\") pod \"service-ca-9c57cc56f-4x7nf\" (UID: \"731a82fb-93f9-42df-8dc3-7e0e0937b5a9\") " pod="openshift-service-ca/service-ca-9c57cc56f-4x7nf" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184648 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/8ef8cb79-5fb6-4438-bc49-4a88a304557d-mountpoint-dir\") pod 
\"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184662 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/8ef8cb79-5fb6-4438-bc49-4a88a304557d-registration-dir\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184676 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mzhl\" (UniqueName: \"kubernetes.io/projected/05f00222-b862-4024-a903-c006f34852fb-kube-api-access-7mzhl\") pod \"controller-manager-879f6c89f-g675c\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184698 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3db268a7-30fd-4031-8eba-72d60056bbbd-secret-volume\") pod \"collect-profiles-29402580-2wnqz\" (UID: \"3db268a7-30fd-4031-8eba-72d60056bbbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184713 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b-serving-cert\") pod \"service-ca-operator-777779d784-98snv\" (UID: \"2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98snv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184729 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vjkt\" (UniqueName: \"kubernetes.io/projected/83f652a7-5618-40ee-8196-ddd4adba4175-kube-api-access-4vjkt\") pod \"package-server-manager-789f6589d5-zf22z\" (UID: \"83f652a7-5618-40ee-8196-ddd4adba4175\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184746 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/8ef8cb79-5fb6-4438-bc49-4a88a304557d-socket-dir\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184772 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cee0e42c-d907-4529-892f-ee830f463490-config-volume\") pod \"dns-default-9vzz6\" (UID: \"cee0e42c-d907-4529-892f-ee830f463490\") " pod="openshift-dns/dns-default-9vzz6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184830 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4f4e9e05-70fe-443d-9747-ea0849db83d4-proxy-tls\") pod \"machine-config-operator-74547568cd-w4wmd\" (UID: \"4f4e9e05-70fe-443d-9747-ea0849db83d4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184849 4622 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/83f652a7-5618-40ee-8196-ddd4adba4175-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-zf22z\" (UID: \"83f652a7-5618-40ee-8196-ddd4adba4175\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184874 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-client-ca\") pod \"controller-manager-879f6c89f-g675c\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184895 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-g675c\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184910 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxff6\" (UniqueName: \"kubernetes.io/projected/1e710e37-8d2a-43f6-9e8f-7eb3233eb276-kube-api-access-jxff6\") pod \"router-default-5444994796-w7zs4\" (UID: \"1e710e37-8d2a-43f6-9e8f-7eb3233eb276\") " pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184928 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vzgf\" (UniqueName: \"kubernetes.io/projected/8ef8cb79-5fb6-4438-bc49-4a88a304557d-kube-api-access-2vzgf\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184941 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4df4382-b969-46c2-b5f7-61631a2bfc06-config\") pod \"authentication-operator-69f744f599-p78g4\" (UID: \"b4df4382-b969-46c2-b5f7-61631a2bfc06\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184962 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmphn\" (UniqueName: \"kubernetes.io/projected/b4df4382-b969-46c2-b5f7-61631a2bfc06-kube-api-access-gmphn\") pod \"authentication-operator-69f744f599-p78g4\" (UID: \"b4df4382-b969-46c2-b5f7-61631a2bfc06\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.184986 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/731a82fb-93f9-42df-8dc3-7e0e0937b5a9-signing-key\") pod \"service-ca-9c57cc56f-4x7nf\" (UID: \"731a82fb-93f9-42df-8dc3-7e0e0937b5a9\") " pod="openshift-service-ca/service-ca-9c57cc56f-4x7nf" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.185002 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1e710e37-8d2a-43f6-9e8f-7eb3233eb276-default-certificate\") pod 
\"router-default-5444994796-w7zs4\" (UID: \"1e710e37-8d2a-43f6-9e8f-7eb3233eb276\") " pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.185027 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/8ef8cb79-5fb6-4438-bc49-4a88a304557d-csi-data-dir\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.185051 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g8n9\" (UniqueName: \"kubernetes.io/projected/2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b-kube-api-access-2g8n9\") pod \"service-ca-operator-777779d784-98snv\" (UID: \"2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98snv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.185078 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4f4e9e05-70fe-443d-9747-ea0849db83d4-images\") pod \"machine-config-operator-74547568cd-w4wmd\" (UID: \"4f4e9e05-70fe-443d-9747-ea0849db83d4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.185103 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qxdt\" (UniqueName: \"kubernetes.io/projected/ca03ec70-790c-40c8-a009-dfa4946f6d06-kube-api-access-8qxdt\") pod \"migrator-59844c95c7-tq627\" (UID: \"ca03ec70-790c-40c8-a009-dfa4946f6d06\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tq627" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.185118 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/8c07900a-9014-44c3-b2a4-358a5a50dc09-certs\") pod \"machine-config-server-n8mbn\" (UID: \"8c07900a-9014-44c3-b2a4-358a5a50dc09\") " pod="openshift-machine-config-operator/machine-config-server-n8mbn" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.186903 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4df4382-b969-46c2-b5f7-61631a2bfc06-serving-cert\") pod \"authentication-operator-69f744f599-p78g4\" (UID: \"b4df4382-b969-46c2-b5f7-61631a2bfc06\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.188688 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1e710e37-8d2a-43f6-9e8f-7eb3233eb276-stats-auth\") pod \"router-default-5444994796-w7zs4\" (UID: \"1e710e37-8d2a-43f6-9e8f-7eb3233eb276\") " pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.189111 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-client-ca\") pod \"controller-manager-879f6c89f-g675c\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.189315 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"socket-dir\" (UniqueName: \"kubernetes.io/host-path/8ef8cb79-5fb6-4438-bc49-4a88a304557d-socket-dir\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.189833 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cee0e42c-d907-4529-892f-ee830f463490-config-volume\") pod \"dns-default-9vzz6\" (UID: \"cee0e42c-d907-4529-892f-ee830f463490\") " pod="openshift-dns/dns-default-9vzz6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.190175 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b-config\") pod \"service-ca-operator-777779d784-98snv\" (UID: \"2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98snv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.190242 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7dae3565-6d26-4f06-a148-66466f9ec9bb-apiservice-cert\") pod \"packageserver-d55dfcdfc-x7ccv\" (UID: \"7dae3565-6d26-4f06-a148-66466f9ec9bb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.192628 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/8ef8cb79-5fb6-4438-bc49-4a88a304557d-csi-data-dir\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.191366 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/731a82fb-93f9-42df-8dc3-7e0e0937b5a9-signing-key\") pod \"service-ca-9c57cc56f-4x7nf\" (UID: \"731a82fb-93f9-42df-8dc3-7e0e0937b5a9\") " pod="openshift-service-ca/service-ca-9c57cc56f-4x7nf" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.191533 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b-serving-cert\") pod \"service-ca-operator-777779d784-98snv\" (UID: \"2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98snv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.191585 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/8ef8cb79-5fb6-4438-bc49-4a88a304557d-mountpoint-dir\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.191619 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/8ef8cb79-5fb6-4438-bc49-4a88a304557d-registration-dir\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.192299 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-g675c\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.193367 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4df4382-b969-46c2-b5f7-61631a2bfc06-config\") pod \"authentication-operator-69f744f599-p78g4\" (UID: \"b4df4382-b969-46c2-b5f7-61631a2bfc06\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.193406 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4df4382-b969-46c2-b5f7-61631a2bfc06-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-p78g4\" (UID: \"b4df4382-b969-46c2-b5f7-61631a2bfc06\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.193775 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4f4e9e05-70fe-443d-9747-ea0849db83d4-proxy-tls\") pod \"machine-config-operator-74547568cd-w4wmd\" (UID: \"4f4e9e05-70fe-443d-9747-ea0849db83d4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.193875 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4df4382-b969-46c2-b5f7-61631a2bfc06-service-ca-bundle\") pod \"authentication-operator-69f744f599-p78g4\" (UID: \"b4df4382-b969-46c2-b5f7-61631a2bfc06\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.193975 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1e710e37-8d2a-43f6-9e8f-7eb3233eb276-default-certificate\") pod \"router-default-5444994796-w7zs4\" (UID: \"1e710e37-8d2a-43f6-9e8f-7eb3233eb276\") " pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: E1126 11:12:57.194189 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:12:57.694176636 +0000 UTC m=+137.285388158 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.194188 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3db268a7-30fd-4031-8eba-72d60056bbbd-config-volume\") pod \"collect-profiles-29402580-2wnqz\" (UID: \"3db268a7-30fd-4031-8eba-72d60056bbbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.190520 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/8c07900a-9014-44c3-b2a4-358a5a50dc09-certs\") pod \"machine-config-server-n8mbn\" (UID: \"8c07900a-9014-44c3-b2a4-358a5a50dc09\") " pod="openshift-machine-config-operator/machine-config-server-n8mbn" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.194534 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1e710e37-8d2a-43f6-9e8f-7eb3233eb276-service-ca-bundle\") pod \"router-default-5444994796-w7zs4\" (UID: \"1e710e37-8d2a-43f6-9e8f-7eb3233eb276\") " pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.194864 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/7dae3565-6d26-4f06-a148-66466f9ec9bb-tmpfs\") pod \"packageserver-d55dfcdfc-x7ccv\" (UID: \"7dae3565-6d26-4f06-a148-66466f9ec9bb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.195087 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4f4e9e05-70fe-443d-9747-ea0849db83d4-images\") pod \"machine-config-operator-74547568cd-w4wmd\" (UID: \"4f4e9e05-70fe-443d-9747-ea0849db83d4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.195127 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/83f652a7-5618-40ee-8196-ddd4adba4175-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-zf22z\" (UID: \"83f652a7-5618-40ee-8196-ddd4adba4175\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.195275 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/731a82fb-93f9-42df-8dc3-7e0e0937b5a9-signing-cabundle\") pod \"service-ca-9c57cc56f-4x7nf\" (UID: \"731a82fb-93f9-42df-8dc3-7e0e0937b5a9\") " pod="openshift-service-ca/service-ca-9c57cc56f-4x7nf" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.195318 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/1e710e37-8d2a-43f6-9e8f-7eb3233eb276-metrics-certs\") pod \"router-default-5444994796-w7zs4\" (UID: \"1e710e37-8d2a-43f6-9e8f-7eb3233eb276\") " pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.195678 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.195685 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-config\") pod \"controller-manager-879f6c89f-g675c\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.196475 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05f00222-b862-4024-a903-c006f34852fb-serving-cert\") pod \"controller-manager-879f6c89f-g675c\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.196956 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-bound-sa-token\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.197942 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b144a741-5958-497e-9b68-8e569bfca541-cert\") pod \"ingress-canary-swz8p\" (UID: \"b144a741-5958-497e-9b68-8e569bfca541\") " pod="openshift-ingress-canary/ingress-canary-swz8p" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.198422 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cee0e42c-d907-4529-892f-ee830f463490-metrics-tls\") pod \"dns-default-9vzz6\" (UID: \"cee0e42c-d907-4529-892f-ee830f463490\") " pod="openshift-dns/dns-default-9vzz6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.198982 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3db268a7-30fd-4031-8eba-72d60056bbbd-secret-volume\") pod \"collect-profiles-29402580-2wnqz\" (UID: \"3db268a7-30fd-4031-8eba-72d60056bbbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.200828 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7dae3565-6d26-4f06-a148-66466f9ec9bb-webhook-cert\") pod \"packageserver-d55dfcdfc-x7ccv\" (UID: \"7dae3565-6d26-4f06-a148-66466f9ec9bb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.205136 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/8c07900a-9014-44c3-b2a4-358a5a50dc09-node-bootstrap-token\") pod \"machine-config-server-n8mbn\" (UID: \"8c07900a-9014-44c3-b2a4-358a5a50dc09\") " 
pod="openshift-machine-config-operator/machine-config-server-n8mbn" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.214645 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq"] Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.215166 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4rsv\" (UniqueName: \"kubernetes.io/projected/3f6822df-f9db-480d-bf69-58c39f017ccc-kube-api-access-w4rsv\") pod \"cluster-samples-operator-665b6dd947-9d7nf\" (UID: \"3f6822df-f9db-480d-bf69-58c39f017ccc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9d7nf" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.225709 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vgnvk"] Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.233319 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/500dd093-4153-433e-b01b-fd8dfa5622f6-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8vgv8\" (UID: \"500dd093-4153-433e-b01b-fd8dfa5622f6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.238539 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9d7nf" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.255703 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ad723c1b-c1ad-40e9-9527-a177e0f01117-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bwskx\" (UID: \"ad723c1b-c1ad-40e9-9527-a177e0f01117\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.277906 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5"] Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.278359 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgmq7\" (UniqueName: \"kubernetes.io/projected/c58a0d86-56ac-4aaa-b2c9-995c925cd839-kube-api-access-tgmq7\") pod \"console-f9d7485db-r2tzg\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.282780 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx"] Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.283788 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7qcfs" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.286318 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:12:57 crc kubenswrapper[4622]: E1126 11:12:57.286544 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:12:57.786523604 +0000 UTC m=+137.377735127 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.287621 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.289544 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw"] Nov 26 11:12:57 crc kubenswrapper[4622]: E1126 11:12:57.290174 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:12:57.790158835 +0000 UTC m=+137.381370358 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.298847 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7"] Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.301816 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjtl7\" (UniqueName: \"kubernetes.io/projected/af17650f-233a-479a-9152-082ca02f5cbb-kube-api-access-fjtl7\") pod \"oauth-openshift-558db77b4-b44gm\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: W1126 11:12:57.313160 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb277c5b_5dac_473c_a509_51016a9b13f4.slice/crio-10750bd3db13bc52be1bcf8671d9cb8890c25960196554e7696e64e7a09d6f21 WatchSource:0}: Error finding container 10750bd3db13bc52be1bcf8671d9cb8890c25960196554e7696e64e7a09d6f21: Status 404 returned error can't find the container with id 10750bd3db13bc52be1bcf8671d9cb8890c25960196554e7696e64e7a09d6f21 Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.317297 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d6415e96-60f9-4d4a-8476-c39c404ba62d-bound-sa-token\") pod \"ingress-operator-5b745b69d9-69qr9\" (UID: \"d6415e96-60f9-4d4a-8476-c39c404ba62d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.319254 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-p5szg" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.338098 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fda35c7d-d0bc-4730-b21d-a66ff3b0f62e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-645bl\" (UID: \"fda35c7d-d0bc-4730-b21d-a66ff3b0f62e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.355472 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcksl\" (UniqueName: \"kubernetes.io/projected/500dd093-4153-433e-b01b-fd8dfa5622f6-kube-api-access-kcksl\") pod \"cluster-image-registry-operator-dc59b4c8b-8vgv8\" (UID: \"500dd093-4153-433e-b01b-fd8dfa5622f6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.377600 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjfll\" (UniqueName: \"kubernetes.io/projected/d74ae43c-85c3-42b6-956b-dca218e79bb0-kube-api-access-bjfll\") pod \"olm-operator-6b444d44fb-4m9s2\" (UID: \"d74ae43c-85c3-42b6-956b-dca218e79bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.385955 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-trgkr"] Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.388287 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:12:57 crc kubenswrapper[4622]: E1126 11:12:57.392150 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:12:57.8921302 +0000 UTC m=+137.483341713 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.399729 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqhdw\" (UniqueName: \"kubernetes.io/projected/f84bd52b-3da4-47a1-a4fc-1c72fa27d846-kube-api-access-hqhdw\") pod \"openshift-controller-manager-operator-756b6f6bc6-dg7cl\" (UID: \"f84bd52b-3da4-47a1-a4fc-1c72fa27d846\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.400348 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:12:57 crc kubenswrapper[4622]: W1126 11:12:57.408554 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf6aca4cf_35d0_47a4_9476_9a6888f31efb.slice/crio-30d4d55aab95282fd90997cc34b05ff28b7e72f04499ecd87135dadb302a6329 WatchSource:0}: Error finding container 30d4d55aab95282fd90997cc34b05ff28b7e72f04499ecd87135dadb302a6329: Status 404 returned error can't find the container with id 30d4d55aab95282fd90997cc34b05ff28b7e72f04499ecd87135dadb302a6329 Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.413953 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xm2lm\" (UniqueName: \"kubernetes.io/projected/e202504f-0b8b-4709-b211-031ebf81cc77-kube-api-access-xm2lm\") pod \"apiserver-76f77b778f-8g8bw\" (UID: \"e202504f-0b8b-4709-b211-031ebf81cc77\") " pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.417820 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.435749 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtd4w\" (UniqueName: \"kubernetes.io/projected/867b2e21-3905-4d08-b96c-e23c8240d93d-kube-api-access-xtd4w\") pod \"route-controller-manager-6576b87f9c-djf82\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.435840 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.461363 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcw9t\" (UniqueName: \"kubernetes.io/projected/ac45a607-6d21-475e-8f82-ed9bbcbb1f65-kube-api-access-xcw9t\") pod \"machine-api-operator-5694c8668f-vprnt\" (UID: \"ac45a607-6d21-475e-8f82-ed9bbcbb1f65\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.474227 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp7bc\" (UniqueName: \"kubernetes.io/projected/71fcfb8e-5a8a-44fe-9933-cee4d8c0012c-kube-api-access-tp7bc\") pod \"console-operator-58897d9998-cwk8q\" (UID: \"71fcfb8e-5a8a-44fe-9933-cee4d8c0012c\") " pod="openshift-console-operator/console-operator-58897d9998-cwk8q" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.480610 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9d7nf"] Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.482803 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.493211 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: E1126 11:12:57.493727 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:12:57.99371122 +0000 UTC m=+137.584922743 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.498716 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7qcfs"] Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.500725 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99g8l\" (UniqueName: \"kubernetes.io/projected/d6415e96-60f9-4d4a-8476-c39c404ba62d-kube-api-access-99g8l\") pod \"ingress-operator-5b745b69d9-69qr9\" (UID: \"d6415e96-60f9-4d4a-8476-c39c404ba62d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.505768 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.515799 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-cwk8q" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.527071 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.539147 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npkhw\" (UniqueName: \"kubernetes.io/projected/731a82fb-93f9-42df-8dc3-7e0e0937b5a9-kube-api-access-npkhw\") pod \"service-ca-9c57cc56f-4x7nf\" (UID: \"731a82fb-93f9-42df-8dc3-7e0e0937b5a9\") " pod="openshift-service-ca/service-ca-9c57cc56f-4x7nf" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.558019 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vjkt\" (UniqueName: \"kubernetes.io/projected/83f652a7-5618-40ee-8196-ddd4adba4175-kube-api-access-4vjkt\") pod \"package-server-manager-789f6589d5-zf22z\" (UID: \"83f652a7-5618-40ee-8196-ddd4adba4175\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.562272 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-p5szg"] Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.569185 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl" Nov 26 11:12:57 crc kubenswrapper[4622]: W1126 11:12:57.574227 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda55ea739_d4ed_4594_9a3d_aca6728c0618.slice/crio-f409d7597371a9d1543559534a69facebedd53049bca82df90042b21343adf7b WatchSource:0}: Error finding container f409d7597371a9d1543559534a69facebedd53049bca82df90042b21343adf7b: Status 404 returned error can't find the container with id f409d7597371a9d1543559534a69facebedd53049bca82df90042b21343adf7b Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.576639 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mzhl\" (UniqueName: \"kubernetes.io/projected/05f00222-b862-4024-a903-c006f34852fb-kube-api-access-7mzhl\") pod \"controller-manager-879f6c89f-g675c\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.576948 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.594388 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.594868 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:12:57 crc kubenswrapper[4622]: E1126 11:12:57.595425 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:12:58.095409421 +0000 UTC m=+137.686620944 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.599064 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxff6\" (UniqueName: \"kubernetes.io/projected/1e710e37-8d2a-43f6-9e8f-7eb3233eb276-kube-api-access-jxff6\") pod \"router-default-5444994796-w7zs4\" (UID: \"1e710e37-8d2a-43f6-9e8f-7eb3233eb276\") " pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.625721 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vzgf\" (UniqueName: \"kubernetes.io/projected/8ef8cb79-5fb6-4438-bc49-4a88a304557d-kube-api-access-2vzgf\") pod \"csi-hostpathplugin-kkvq6\" (UID: \"8ef8cb79-5fb6-4438-bc49-4a88a304557d\") " pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.629271 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.651018 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmphn\" (UniqueName: \"kubernetes.io/projected/b4df4382-b969-46c2-b5f7-61631a2bfc06-kube-api-access-gmphn\") pod \"authentication-operator-69f744f599-p78g4\" (UID: \"b4df4382-b969-46c2-b5f7-61631a2bfc06\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.654725 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.679298 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-4x7nf" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.679556 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4z4vz\" (UniqueName: \"kubernetes.io/projected/3db268a7-30fd-4031-8eba-72d60056bbbd-kube-api-access-4z4vz\") pod \"collect-profiles-29402580-2wnqz\" (UID: \"3db268a7-30fd-4031-8eba-72d60056bbbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.682202 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxsdk\" (UniqueName: \"kubernetes.io/projected/8c07900a-9014-44c3-b2a4-358a5a50dc09-kube-api-access-qxsdk\") pod \"machine-config-server-n8mbn\" (UID: \"8c07900a-9014-44c3-b2a4-358a5a50dc09\") " pod="openshift-machine-config-operator/machine-config-server-n8mbn" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.682483 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.683730 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.693079 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.696490 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: E1126 11:12:57.696792 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:12:58.196780265 +0000 UTC m=+137.787991777 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.699668 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-n8mbn" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.708037 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw6xn\" (UniqueName: \"kubernetes.io/projected/b144a741-5958-497e-9b68-8e569bfca541-kube-api-access-gw6xn\") pod \"ingress-canary-swz8p\" (UID: \"b144a741-5958-497e-9b68-8e569bfca541\") " pod="openshift-ingress-canary/ingress-canary-swz8p" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.714563 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.725291 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcnhp\" (UniqueName: \"kubernetes.io/projected/cee0e42c-d907-4529-892f-ee830f463490-kube-api-access-gcnhp\") pod \"dns-default-9vzz6\" (UID: \"cee0e42c-d907-4529-892f-ee830f463490\") " pod="openshift-dns/dns-default-9vzz6" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.735374 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g8n9\" (UniqueName: \"kubernetes.io/projected/2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b-kube-api-access-2g8n9\") pod \"service-ca-operator-777779d784-98snv\" (UID: \"2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98snv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.762223 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km2n8\" (UniqueName: \"kubernetes.io/projected/7dae3565-6d26-4f06-a148-66466f9ec9bb-kube-api-access-km2n8\") pod \"packageserver-d55dfcdfc-x7ccv\" (UID: \"7dae3565-6d26-4f06-a148-66466f9ec9bb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.780750 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qxdt\" (UniqueName: \"kubernetes.io/projected/ca03ec70-790c-40c8-a009-dfa4946f6d06-kube-api-access-8qxdt\") pod \"migrator-59844c95c7-tq627\" (UID: \"ca03ec70-790c-40c8-a009-dfa4946f6d06\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tq627" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.798168 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:12:57 crc kubenswrapper[4622]: E1126 11:12:57.798577 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:12:58.298563266 +0000 UTC m=+137.889774788 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.813783 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfgdl\" (UniqueName: \"kubernetes.io/projected/4f4e9e05-70fe-443d-9747-ea0849db83d4-kube-api-access-tfgdl\") pod \"machine-config-operator-74547568cd-w4wmd\" (UID: \"4f4e9e05-70fe-443d-9747-ea0849db83d4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.820773 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-swz8p" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.821214 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-db5d2\" (UniqueName: \"kubernetes.io/projected/63b1178e-6e77-4714-96eb-7dc978789aaa-kube-api-access-db5d2\") pod \"downloads-7954f5f757-5lz6m\" (UID: \"63b1178e-6e77-4714-96eb-7dc978789aaa\") " pod="openshift-console/downloads-7954f5f757-5lz6m" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.899424 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:57 crc kubenswrapper[4622]: E1126 11:12:57.899749 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:12:58.399735275 +0000 UTC m=+137.990946797 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.924002 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.935125 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-5lz6m" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.944434 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tq627" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.951165 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.960712 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.968799 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" Nov 26 11:12:57 crc kubenswrapper[4622]: I1126 11:12:57.972540 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-98snv" Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.001005 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:12:58 crc kubenswrapper[4622]: E1126 11:12:58.001398 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:12:58.501383251 +0000 UTC m=+138.092594783 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.016069 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-9vzz6" Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.018947 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8g8bw"] Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.083172 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-r2tzg"] Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.113018 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:58 crc kubenswrapper[4622]: E1126 11:12:58.113266 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:12:58.613255264 +0000 UTC m=+138.204466786 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.124053 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-cwk8q"] Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.164681 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl"] Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.166908 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx"] Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.200666 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-b44gm"] Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.219208 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:12:58 crc kubenswrapper[4622]: E1126 11:12:58.219487 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:12:58.719473473 +0000 UTC m=+138.310684995 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.229940 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn" event={"ID":"2318ac1c-1c1e-4c0d-97ae-bdc94a8f6eb1","Type":"ContainerStarted","Data":"4e63bee0b9e3faebf414775750f4a015eccac5c0c431881db3b281b386fbf62d"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.238443 4622 generic.go:334] "Generic (PLEG): container finished" podID="f6aca4cf-35d0-47a4-9476-9a6888f31efb" containerID="15e4e1efd7e7889fbf1e107d991495a32891403e1690eb392f02b10783c817bd" exitCode=0 Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.238520 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr" event={"ID":"f6aca4cf-35d0-47a4-9476-9a6888f31efb","Type":"ContainerDied","Data":"15e4e1efd7e7889fbf1e107d991495a32891403e1690eb392f02b10783c817bd"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.238550 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr" event={"ID":"f6aca4cf-35d0-47a4-9476-9a6888f31efb","Type":"ContainerStarted","Data":"30d4d55aab95282fd90997cc34b05ff28b7e72f04499ecd87135dadb302a6329"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.244563 4622 generic.go:334] "Generic (PLEG): container finished" podID="f8bc3e2f-5cfa-49da-863d-e8c611605186" containerID="d3544373750d337c91dabbe0a9b532875db05acb7990c6226de124cbd62d53c9" exitCode=0 Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.244628 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" event={"ID":"f8bc3e2f-5cfa-49da-863d-e8c611605186","Type":"ContainerDied","Data":"d3544373750d337c91dabbe0a9b532875db05acb7990c6226de124cbd62d53c9"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.244655 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" event={"ID":"f8bc3e2f-5cfa-49da-863d-e8c611605186","Type":"ContainerStarted","Data":"d5eee4a3cf4e33b5e37fc873c5733e6c8e17af6a3459b8f8dd2a196e3b73761c"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.263092 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2"] Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.273422 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7" event={"ID":"d050f464-dde0-45b6-a227-25bea4eafe0b","Type":"ContainerStarted","Data":"bd5ac783e589d962f290e4a25d692e3265e74a5c5d8f5a7f2d31efb8504b90cc"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.273462 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7" event={"ID":"d050f464-dde0-45b6-a227-25bea4eafe0b","Type":"ContainerStarted","Data":"ed08e932d1ab8600dab313e648e68a6f6f4d3e98ef4f64932be6b098cfa26a66"} 
Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.273474 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7" event={"ID":"d050f464-dde0-45b6-a227-25bea4eafe0b","Type":"ContainerStarted","Data":"fca948d693da3659972ab30f637a5b93102bc53ef3284fc6f1b4385099460d39"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.283262 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-zgbwd" event={"ID":"94200fbf-6965-4439-b5f7-8b00787317ea","Type":"ContainerStarted","Data":"b5dc5329deb2d9f4f67674bc576211fbc60aa1db59e512de89c6b0dace9b1c3c"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.283295 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-zgbwd" event={"ID":"94200fbf-6965-4439-b5f7-8b00787317ea","Type":"ContainerStarted","Data":"e6c6eec9c9ffa96afe743fd837da5b18ec749425288d5f8fb8d5d70d9b140954"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.285575 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8"] Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.298066 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl"] Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.298109 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" event={"ID":"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992","Type":"ContainerStarted","Data":"49c3c118ef9985260f58b7758f6bff035f64c948cb2fde2e91402dbae1e00763"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.298129 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" event={"ID":"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992","Type":"ContainerStarted","Data":"66decb7e0c3f3e3897de18c4306eab60a0293e9779d563871ca31eac6737d73f"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.298578 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.304233 4622 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-vgnvk container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body= Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.304294 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" podUID="8bd37ceb-bccf-4efc-914e-0fd8ad2bc992" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.311985 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq" event={"ID":"75e482b2-62c5-4e59-8523-e9e5c887d9db","Type":"ContainerStarted","Data":"c07426ba3d26ab1eb6f7243535273f45f8d8b17d29b3a68cc39ab4f58239f3d8"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.312022 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq" event={"ID":"75e482b2-62c5-4e59-8523-e9e5c887d9db","Type":"ContainerStarted","Data":"8efa2364dc36d02b8659ec5909dd8141d9c0baf79f122aa9f13b28b9e69ef24b"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.313650 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq" Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.332655 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:58 crc kubenswrapper[4622]: E1126 11:12:58.333001 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:12:58.832988843 +0000 UTC m=+138.424200355 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.334966 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7qcfs" event={"ID":"4c8189e8-b292-48af-8a28-23021c696ba5","Type":"ContainerStarted","Data":"7aa6cde85976a814c6c816ad980d241e4655eee7bc72ab4d9cecd2d839671939"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.335078 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq" Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.335097 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7qcfs" event={"ID":"4c8189e8-b292-48af-8a28-23021c696ba5","Type":"ContainerStarted","Data":"b4617acff0c962ebb2b68adf3c80dc9c0fc5a764bef20e1810d5728b9b57a25d"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.351182 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-w7zs4" event={"ID":"1e710e37-8d2a-43f6-9e8f-7eb3233eb276","Type":"ContainerStarted","Data":"25458ff9b65003d6276ee9e7426f6265b3571285dd19a4176995c069cf5528f3"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.352866 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5" event={"ID":"00a6832e-e8ed-48ff-9b63-fe02a10d1561","Type":"ContainerStarted","Data":"9de973c53f2a356626661e53192aecb2ffb2a63ea7ca01bfce810f94905d5101"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.352967 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5" 
event={"ID":"00a6832e-e8ed-48ff-9b63-fe02a10d1561","Type":"ContainerStarted","Data":"f80079626f12d73eb38363091baec3a10e3a32abc1a71db7e9afe6c438426833"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.362421 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9d7nf" event={"ID":"3f6822df-f9db-480d-bf69-58c39f017ccc","Type":"ContainerStarted","Data":"15e3d7f0c13d58449a5ab276781de94b3b68305384eb5bc2bd8d292342c63062"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.362534 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9d7nf" event={"ID":"3f6822df-f9db-480d-bf69-58c39f017ccc","Type":"ContainerStarted","Data":"bdc38e3c3a83aa9fe8fa9e3103e26cdeaea42d9194c3034095576f0ac6de9e73"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.364685 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" event={"ID":"e9529ef9-5f68-4c96-8266-d5e4fb16f749","Type":"ContainerStarted","Data":"0f349a9085bc1d64c588e163dc7c47e9867138d5cc5374a1604ecdae45428128"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.368204 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw" event={"ID":"bb277c5b-5dac-473c-a509-51016a9b13f4","Type":"ContainerStarted","Data":"43fa439f2dc0a64f0313913150543aa60059348f74e974e93267e3744c245ced"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.368252 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw" event={"ID":"bb277c5b-5dac-473c-a509-51016a9b13f4","Type":"ContainerStarted","Data":"10750bd3db13bc52be1bcf8671d9cb8890c25960196554e7696e64e7a09d6f21"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.372274 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-n8mbn" event={"ID":"8c07900a-9014-44c3-b2a4-358a5a50dc09","Type":"ContainerStarted","Data":"37b004e4c702d859a1c35b2a7a64c916c1f332cbd1a96622ac10efd2b416f4d6"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.375090 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz" event={"ID":"7dd22bb8-3b26-4a60-8321-5462640f7816","Type":"ContainerStarted","Data":"eef0c5bfec83b86e13da387cfd003e989a9c00ab00fc76ad1d6a152225aee021"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.375113 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz" event={"ID":"7dd22bb8-3b26-4a60-8321-5462640f7816","Type":"ContainerStarted","Data":"41bd8f05a9da18976f586634f265b2842ccce8b596cddd4f577251b1269c7b07"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.378052 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-p5szg" event={"ID":"a55ea739-d4ed-4594-9a3d-aca6728c0618","Type":"ContainerStarted","Data":"f409d7597371a9d1543559534a69facebedd53049bca82df90042b21343adf7b"} Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.421023 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6gns5" 
podStartSLOduration=120.421003035 podStartE2EDuration="2m0.421003035s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:58.410517857 +0000 UTC m=+138.001729389" watchObservedRunningTime="2025-11-26 11:12:58.421003035 +0000 UTC m=+138.012214558"
Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.433567 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 26 11:12:58 crc kubenswrapper[4622]: E1126 11:12:58.435274 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:12:58.935250275 +0000 UTC m=+138.526461798 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.493780 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7qcfs" podStartSLOduration=120.49375587 podStartE2EDuration="2m0.49375587s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:58.492001222 +0000 UTC m=+138.083212744" watchObservedRunningTime="2025-11-26 11:12:58.49375587 +0000 UTC m=+138.084967391"
Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.536271 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
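
The pod_startup_latency_tracker records above are pure arithmetic: the reported podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (11:12:58.421003035 minus 11:10:58 is 2m0.421003035s, hence podStartSLOduration=120.421003035), and the zero firstStartedPulling/lastFinishedPulling timestamps mean no image pull was observed for this pod. A small Go check of the reported numbers, using the timestamps copied from the kube-storage-version-migrator-operator line:

package main

import (
	"fmt"
	"time"
)

func main() {
	// podCreationTimestamp and watchObservedRunningTime from the log record.
	created, _ := time.Parse(time.RFC3339, "2025-11-26T11:10:58Z")
	running, _ := time.Parse(time.RFC3339Nano, "2025-11-26T11:12:58.421003035Z")

	e2e := running.Sub(created)
	fmt.Println(e2e)           // 2m0.421003035s -> podStartE2EDuration
	fmt.Println(e2e.Seconds()) // 120.421003035  -> podStartSLOduration
}
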
Nov 26 11:12:58 crc kubenswrapper[4622]: E1126 11:12:58.538576 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:12:59.038555885 +0000 UTC m=+138.629767397 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.547061 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-zgbwd" podStartSLOduration=120.547041216 podStartE2EDuration="2m0.547041216s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:58.546006405 +0000 UTC m=+138.137217928" watchObservedRunningTime="2025-11-26 11:12:58.547041216 +0000 UTC m=+138.138252738"
Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.579671 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-mq7t5" podStartSLOduration=120.579652512 podStartE2EDuration="2m0.579652512s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:58.578119201 +0000 UTC m=+138.169330724" watchObservedRunningTime="2025-11-26 11:12:58.579652512 +0000 UTC m=+138.170864034"
Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.638351 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 26 11:12:58 crc kubenswrapper[4622]: E1126 11:12:58.638965 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:12:59.138944377 +0000 UTC m=+138.730155899 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.672041 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kgngw" podStartSLOduration=120.672023536 podStartE2EDuration="2m0.672023536s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:58.624569709 +0000 UTC m=+138.215781230" watchObservedRunningTime="2025-11-26 11:12:58.672023536 +0000 UTC m=+138.263235057" Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.744040 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zglsq" podStartSLOduration=120.744019261 podStartE2EDuration="2m0.744019261s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:58.743124706 +0000 UTC m=+138.334336228" watchObservedRunningTime="2025-11-26 11:12:58.744019261 +0000 UTC m=+138.335230784" Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.746115 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:58 crc kubenswrapper[4622]: E1126 11:12:58.746434 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:12:59.246423072 +0000 UTC m=+138.837634594 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.790773 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4x7nf"] Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.790887 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z"] Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.790964 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9"] Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.848705 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:12:58 crc kubenswrapper[4622]: E1126 11:12:58.850951 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:12:59.35093288 +0000 UTC m=+138.942144402 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:58 crc kubenswrapper[4622]: W1126 11:12:58.874102 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod731a82fb_93f9_42df_8dc3_7e0e0937b5a9.slice/crio-4c1f5a74dad86ed2f2279548337069aec4cff651fce93bd0052abdc63d26d13b WatchSource:0}: Error finding container 4c1f5a74dad86ed2f2279548337069aec4cff651fce93bd0052abdc63d26d13b: Status 404 returned error can't find the container with id 4c1f5a74dad86ed2f2279548337069aec4cff651fce93bd0052abdc63d26d13b Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.933803 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vprnt"] Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.941154 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82"] Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.952475 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:58 crc kubenswrapper[4622]: E1126 11:12:58.953087 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:12:59.453072353 +0000 UTC m=+139.044283876 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:58 crc kubenswrapper[4622]: I1126 11:12:58.967599 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g675c"] Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.052976 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6fsv7" podStartSLOduration=121.052956815 podStartE2EDuration="2m1.052956815s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:59.052145787 +0000 UTC m=+138.643357309" watchObservedRunningTime="2025-11-26 11:12:59.052956815 +0000 UTC m=+138.644168338" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.053703 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:12:59 crc kubenswrapper[4622]: E1126 11:12:59.054085 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:12:59.554073881 +0000 UTC m=+139.145285403 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.152378 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" podStartSLOduration=121.152362987 podStartE2EDuration="2m1.152362987s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:59.139038828 +0000 UTC m=+138.730250349" watchObservedRunningTime="2025-11-26 11:12:59.152362987 +0000 UTC m=+138.743574510"
Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.158119 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
Nov 26 11:12:59 crc kubenswrapper[4622]: E1126 11:12:59.158454 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:12:59.658442514 +0000 UTC m=+139.249654036 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.205548 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-swz8p"]
Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.261988 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
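
Each failed volume operation above is not retried immediately: nestedpendingoperations refuses new attempts until the recorded error time plus a backoff delay, which is why every error names a "No retries permitted until ..." instant 500ms after the failure. A sketch of that bookkeeping, assuming a 500ms initial delay that doubles per failure up to a cap; the real constants and growth policy are internal kubelet details and may differ, and in this stretch of log the delay is still at its initial value:

package main

import (
	"fmt"
	"time"
)

// Assumed constants in the spirit of the kubelet's exponential backoff
// for volume operations (initial 500ms, doubling, capped).
const (
	initialDelay = 500 * time.Millisecond
	maxDelay     = 2*time.Minute + 2*time.Second
)

type pendingOp struct {
	delay         time.Duration
	lastErrorTime time.Time
}

// recordError notes a failure and returns the earliest next-attempt time,
// i.e. the "No retries permitted until ..." instant printed in the log.
func (op *pendingOp) recordError(now time.Time) time.Time {
	if op.delay == 0 {
		op.delay = initialDelay
	} else {
		op.delay *= 2
		if op.delay > maxDelay {
			op.delay = maxDelay
		}
	}
	op.lastErrorTime = now
	return now.Add(op.delay)
}

func main() {
	op := &pendingOp{}
	// Failure stamped 11:12:59.262384; next retry allowed ~500ms later,
	// matching the log's 11:12:59.762368... up to when the error was recorded.
	failure := time.Date(2025, time.November, 26, 11, 12, 59, 262384000, time.UTC)
	fmt.Println(op.recordError(failure))
}
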
Nov 26 11:12:59 crc kubenswrapper[4622]: E1126 11:12:59.262384 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:12:59.762368493 +0000 UTC m=+139.353580015 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.289404 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz"]
Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.302376 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-kkvq6"]
Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.308654 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-tq627"]
Nov 26 11:12:59 crc kubenswrapper[4622]: W1126 11:12:59.331699 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ef8cb79_5fb6_4438_bc49_4a88a304557d.slice/crio-1dde767ad5ffc4638a3a1a918e536a9a1665cb77eae46972f77119fb2a451e26 WatchSource:0}: Error finding container 1dde767ad5ffc4638a3a1a918e536a9a1665cb77eae46972f77119fb2a451e26: Status 404 returned error can't find the container with id 1dde767ad5ffc4638a3a1a918e536a9a1665cb77eae46972f77119fb2a451e26
Nov 26 11:12:59 crc kubenswrapper[4622]: W1126 11:12:59.333059 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca03ec70_790c_40c8_a009_dfa4946f6d06.slice/crio-307287078e2c1b9ea17ac28d219c6fe7c2eeab6f4fee994c1a8c19254aef4f1d WatchSource:0}: Error finding container 307287078e2c1b9ea17ac28d219c6fe7c2eeab6f4fee994c1a8c19254aef4f1d: Status 404 returned error can't find the container with id 307287078e2c1b9ea17ac28d219c6fe7c2eeab6f4fee994c1a8c19254aef4f1d
Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.364703 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
Nov 26 11:12:59 crc kubenswrapper[4622]: E1126 11:12:59.365013 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:12:59.865001084 +0000 UTC m=+139.456212606 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.366772 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l97bl"] Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.367686 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l97bl" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.376612 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-98snv"] Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.376658 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd"] Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.386642 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.399543 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l97bl"] Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.399792 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl" event={"ID":"fda35c7d-d0bc-4730-b21d-a66ff3b0f62e","Type":"ContainerStarted","Data":"6a24c6c26ce667f25969a04441f5d299e8bc0f6ed3d45bc9e11a0dc7a247df48"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.401266 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hpqsn" podStartSLOduration=121.40124202 podStartE2EDuration="2m1.40124202s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:59.386443914 +0000 UTC m=+138.977655446" watchObservedRunningTime="2025-11-26 11:12:59.40124202 +0000 UTC m=+138.992453542" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.401708 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" event={"ID":"867b2e21-3905-4d08-b96c-e23c8240d93d","Type":"ContainerStarted","Data":"378131fe0db66186f44d756abebd94be91bdba8be97485eac30ed9abbceff364"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.403920 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-5lz6m"] Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.404580 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" event={"ID":"d74ae43c-85c3-42b6-956b-dca218e79bb0","Type":"ContainerStarted","Data":"1042e32211e33a8f7e0e1aa97b73f64f2cb52bfb98fa88a3574c5d821a0090ad"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.404616 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" event={"ID":"d74ae43c-85c3-42b6-956b-dca218e79bb0","Type":"ContainerStarted","Data":"a23d134dc4c49bfde066c30a279d613ff197d72dc0a6c7e291fd5c09edc9f0e0"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.405329 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.407410 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" event={"ID":"8ef8cb79-5fb6-4438-bc49-4a88a304557d","Type":"ContainerStarted","Data":"1dde767ad5ffc4638a3a1a918e536a9a1665cb77eae46972f77119fb2a451e26"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.408456 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv"] Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.408743 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-w7zs4" event={"ID":"1e710e37-8d2a-43f6-9e8f-7eb3233eb276","Type":"ContainerStarted","Data":"5b6ee57e1bea1f4b04acdff41e4911ce7c7e313c2b6119a310d66708ae687fdf"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.408981 4622 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-4m9s2 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.409024 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" podUID="d74ae43c-85c3-42b6-956b-dca218e79bb0" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.410316 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" event={"ID":"3db268a7-30fd-4031-8eba-72d60056bbbd","Type":"ContainerStarted","Data":"e03af246d2f4d088032eb00abddaccfe8e1949e121f137c2fb4377e9542a8c2a"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.416252 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-swz8p" event={"ID":"b144a741-5958-497e-9b68-8e569bfca541","Type":"ContainerStarted","Data":"78f677a9d938d1cee0e0882fd64586bc317aabb5a813644c0dc6448661d301d0"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.423047 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl" event={"ID":"f84bd52b-3da4-47a1-a4fc-1c72fa27d846","Type":"ContainerStarted","Data":"732b67cec6acd7b56480948b7a64962bdacd172d33b1ec4f6f3a2c6aa0526c0d"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.423378 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl" event={"ID":"f84bd52b-3da4-47a1-a4fc-1c72fa27d846","Type":"ContainerStarted","Data":"f1883a1b6dcc28fafcd02dec61faff3fc19618481d3de7acbd64f3d72a0d31fd"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.432036 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z" event={"ID":"83f652a7-5618-40ee-8196-ddd4adba4175","Type":"ContainerStarted","Data":"e39cfb29dc8664b655e9b46557fb4c8a4a6c2e79a09ff77418d5726735455e22"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.432067 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z" event={"ID":"83f652a7-5618-40ee-8196-ddd4adba4175","Type":"ContainerStarted","Data":"ed9fe7f98a0cf0cf3aba775372304a732c53ba77c4daa89f69f14f767c72af89"} Nov 26 11:12:59 crc kubenswrapper[4622]: W1126 11:12:59.437180 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f4e9e05_70fe_443d_9747_ea0849db83d4.slice/crio-eb019955c96174ff77a3af422e766435860a5c04e4a60a2258b5f5ca15eabee5 WatchSource:0}: Error finding container eb019955c96174ff77a3af422e766435860a5c04e4a60a2258b5f5ca15eabee5: Status 404 returned error can't find the container with id eb019955c96174ff77a3af422e766435860a5c04e4a60a2258b5f5ca15eabee5 Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.437648 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tq627" event={"ID":"ca03ec70-790c-40c8-a009-dfa4946f6d06","Type":"ContainerStarted","Data":"307287078e2c1b9ea17ac28d219c6fe7c2eeab6f4fee994c1a8c19254aef4f1d"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.440527 4622 generic.go:334] "Generic (PLEG): container finished" podID="e202504f-0b8b-4709-b211-031ebf81cc77" containerID="4e8a22e577cbf1c91e1571931751856a3ff70cf41c54bcbecda6ec09eb05c8bf" exitCode=0 Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.440581 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" event={"ID":"e202504f-0b8b-4709-b211-031ebf81cc77","Type":"ContainerDied","Data":"4e8a22e577cbf1c91e1571931751856a3ff70cf41c54bcbecda6ec09eb05c8bf"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.440600 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" event={"ID":"e202504f-0b8b-4709-b211-031ebf81cc77","Type":"ContainerStarted","Data":"680908c58bf527ca4fdbaaa73d01fefe524a8595095fd0bfdc543231f513e354"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.454824 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr" event={"ID":"f6aca4cf-35d0-47a4-9476-9a6888f31efb","Type":"ContainerStarted","Data":"a899cb8d3a165cdc029d3ab3e8511a624e2a6ec3b28b3591b2287977dda6670d"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.456005 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.464366 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-p78g4"] Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.487000 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:12:59 crc 
kubenswrapper[4622]: E1126 11:12:59.489281 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:12:59.989257085 +0000 UTC m=+139.580468607 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.490026 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b71a6300-1cfd-4eb0-b75c-7231184cfe79-utilities\") pod \"certified-operators-l97bl\" (UID: \"b71a6300-1cfd-4eb0-b75c-7231184cfe79\") " pod="openshift-marketplace/certified-operators-l97bl" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.490117 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6tpz\" (UniqueName: \"kubernetes.io/projected/b71a6300-1cfd-4eb0-b75c-7231184cfe79-kube-api-access-z6tpz\") pod \"certified-operators-l97bl\" (UID: \"b71a6300-1cfd-4eb0-b75c-7231184cfe79\") " pod="openshift-marketplace/certified-operators-l97bl" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.490176 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b71a6300-1cfd-4eb0-b75c-7231184cfe79-catalog-content\") pod \"certified-operators-l97bl\" (UID: \"b71a6300-1cfd-4eb0-b75c-7231184cfe79\") " pod="openshift-marketplace/certified-operators-l97bl" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.490488 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:59 crc kubenswrapper[4622]: E1126 11:12:59.490916 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:12:59.990895884 +0000 UTC m=+139.582107406 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.494701 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" event={"ID":"500dd093-4153-433e-b01b-fd8dfa5622f6","Type":"ContainerStarted","Data":"513dd180194bcc146b7f9dded3afecf1471611497e546d402d426b73215ad8f4"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.494761 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" event={"ID":"500dd093-4153-433e-b01b-fd8dfa5622f6","Type":"ContainerStarted","Data":"aeef3ac4febe4380c6d8bb1f61cac8036fcc200d075078a9b5e7bcc93afd9705"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.506091 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-9vzz6"] Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.511727 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-n8mbn" event={"ID":"8c07900a-9014-44c3-b2a4-358a5a50dc09","Type":"ContainerStarted","Data":"a4ffa0d7979686164844144e3c1f2ca2a36c22794ceea05ab3e33e6c927febe8"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.537903 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-4x7nf" event={"ID":"731a82fb-93f9-42df-8dc3-7e0e0937b5a9","Type":"ContainerStarted","Data":"4c1f5a74dad86ed2f2279548337069aec4cff651fce93bd0052abdc63d26d13b"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.552235 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-p5szg" event={"ID":"a55ea739-d4ed-4594-9a3d-aca6728c0618","Type":"ContainerStarted","Data":"eb8cb0d6a36db24bf0f7f719357c038a2a4c8bdf282694c86bbcc8ce8245e2d6"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.552277 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-p5szg" event={"ID":"a55ea739-d4ed-4594-9a3d-aca6728c0618","Type":"ContainerStarted","Data":"ee009e50ce858c461e4bd41d560e3622ea3390ae7395174bf8657c61e1511ef5"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.558156 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-r2tzg" event={"ID":"c58a0d86-56ac-4aaa-b2c9-995c925cd839","Type":"ContainerStarted","Data":"01d8fbd0913a8775e6aa41f486212fd64f03bd1ad4913aa12e410773e0f65dab"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.558201 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-r2tzg" event={"ID":"c58a0d86-56ac-4aaa-b2c9-995c925cd839","Type":"ContainerStarted","Data":"67d2db5645bd7ecba9970d32347525db60b6c8aa0fc4831443d3338030b1bd59"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.565111 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-d4zmf"] Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.566521 4622 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openshift-marketplace/community-operators-d4zmf"] Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.566611 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d4zmf" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.576397 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" event={"ID":"05f00222-b862-4024-a903-c006f34852fb","Type":"ContainerStarted","Data":"099cccb0f6c810668605e3a8ad4cf4bbbd9885ec70f13a02d158ca7b2168e5d4"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.576590 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.578214 4622 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-g675c container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.578282 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" event={"ID":"d6415e96-60f9-4d4a-8476-c39c404ba62d","Type":"ContainerStarted","Data":"f15af812bd83a4b1e022a25affb78fc3842cec5f61d30b374a93c3c511c64384"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.578283 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" podUID="05f00222-b862-4024-a903-c006f34852fb" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.581931 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx" event={"ID":"ad723c1b-c1ad-40e9-9527-a177e0f01117","Type":"ContainerStarted","Data":"b93218e1990a9c741fbea17cdf76fde49b7564937779fe0e94ff309c16e96768"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.581986 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx" event={"ID":"ad723c1b-c1ad-40e9-9527-a177e0f01117","Type":"ContainerStarted","Data":"5e77cd050dea8260f1e8b69801102b7f5be9009f50448699f3c777bdb1921342"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.587556 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-cwk8q" event={"ID":"71fcfb8e-5a8a-44fe-9933-cee4d8c0012c","Type":"ContainerStarted","Data":"a5186e1a4ffbcecbfcc5004f1f796538e0992e835e9a590873978c9cc0694f97"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.587675 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-cwk8q" event={"ID":"71fcfb8e-5a8a-44fe-9933-cee4d8c0012c","Type":"ContainerStarted","Data":"2bd72cf64581ca1d3e0c31a7efc57850b24e4a4d6be174607f099f1cb378ffa0"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.587749 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-cwk8q" Nov 26 11:12:59 crc 
kubenswrapper[4622]: I1126 11:12:59.590430 4622 patch_prober.go:28] interesting pod/console-operator-58897d9998-cwk8q container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/readyz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.590461 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-cwk8q" podUID="71fcfb8e-5a8a-44fe-9933-cee4d8c0012c" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/readyz\": dial tcp 10.217.0.24:8443: connect: connection refused" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.592430 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:12:59 crc kubenswrapper[4622]: E1126 11:12:59.592581 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:00.092564008 +0000 UTC m=+139.683775530 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.592883 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.592984 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b71a6300-1cfd-4eb0-b75c-7231184cfe79-utilities\") pod \"certified-operators-l97bl\" (UID: \"b71a6300-1cfd-4eb0-b75c-7231184cfe79\") " pod="openshift-marketplace/certified-operators-l97bl" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.593100 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6tpz\" (UniqueName: \"kubernetes.io/projected/b71a6300-1cfd-4eb0-b75c-7231184cfe79-kube-api-access-z6tpz\") pod \"certified-operators-l97bl\" (UID: \"b71a6300-1cfd-4eb0-b75c-7231184cfe79\") " pod="openshift-marketplace/certified-operators-l97bl" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.593187 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b71a6300-1cfd-4eb0-b75c-7231184cfe79-catalog-content\") pod \"certified-operators-l97bl\" (UID: 
\"b71a6300-1cfd-4eb0-b75c-7231184cfe79\") " pod="openshift-marketplace/certified-operators-l97bl" Nov 26 11:12:59 crc kubenswrapper[4622]: E1126 11:12:59.594984 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:00.094972486 +0000 UTC m=+139.686184009 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.595424 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b71a6300-1cfd-4eb0-b75c-7231184cfe79-catalog-content\") pod \"certified-operators-l97bl\" (UID: \"b71a6300-1cfd-4eb0-b75c-7231184cfe79\") " pod="openshift-marketplace/certified-operators-l97bl" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.596180 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b71a6300-1cfd-4eb0-b75c-7231184cfe79-utilities\") pod \"certified-operators-l97bl\" (UID: \"b71a6300-1cfd-4eb0-b75c-7231184cfe79\") " pod="openshift-marketplace/certified-operators-l97bl" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.596265 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-4x7nf" podStartSLOduration=120.596229675 podStartE2EDuration="2m0.596229675s" podCreationTimestamp="2025-11-26 11:10:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:59.591050705 +0000 UTC m=+139.182262228" watchObservedRunningTime="2025-11-26 11:12:59.596229675 +0000 UTC m=+139.187441188" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.602663 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.607995 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9d7nf" event={"ID":"3f6822df-f9db-480d-bf69-58c39f017ccc","Type":"ContainerStarted","Data":"dfacab0f61c1953e51742855cd086260be333766fe73428cd59b6747762d996c"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.621857 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" event={"ID":"af17650f-233a-479a-9152-082ca02f5cbb","Type":"ContainerStarted","Data":"61e2b810d1d63d34c0274ee25f0b42751fd90d29e0e08f62993dfe7caea67d4c"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.622989 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.623804 4622 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-b44gm container/oauth-openshift namespace/openshift-authentication: Readiness probe 
status=failure output="Get \"https://10.217.0.9:6443/healthz\": dial tcp 10.217.0.9:6443: connect: connection refused" start-of-body= Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.623831 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" podUID="af17650f-233a-479a-9152-082ca02f5cbb" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.9:6443/healthz\": dial tcp 10.217.0.9:6443: connect: connection refused" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.625165 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" event={"ID":"ac45a607-6d21-475e-8f82-ed9bbcbb1f65","Type":"ContainerStarted","Data":"b44a937bb7e28d2313b08a7aa317326c16081ca8a923f485068a91496a173c20"} Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.629215 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.629946 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.634981 4622 patch_prober.go:28] interesting pod/router-default-5444994796-w7zs4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 26 11:12:59 crc kubenswrapper[4622]: [-]has-synced failed: reason withheld Nov 26 11:12:59 crc kubenswrapper[4622]: [+]process-running ok Nov 26 11:12:59 crc kubenswrapper[4622]: healthz check failed Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.635023 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-w7zs4" podUID="1e710e37-8d2a-43f6-9e8f-7eb3233eb276" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.658574 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6tpz\" (UniqueName: \"kubernetes.io/projected/b71a6300-1cfd-4eb0-b75c-7231184cfe79-kube-api-access-z6tpz\") pod \"certified-operators-l97bl\" (UID: \"b71a6300-1cfd-4eb0-b75c-7231184cfe79\") " pod="openshift-marketplace/certified-operators-l97bl" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.685015 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8vgv8" podStartSLOduration=121.684998821 podStartE2EDuration="2m1.684998821s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:59.682258408 +0000 UTC m=+139.273469929" watchObservedRunningTime="2025-11-26 11:12:59.684998821 +0000 UTC m=+139.276210344" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.690959 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l97bl" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.693987 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.695352 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acccae42-8133-475f-ad53-dbfa434e5e45-utilities\") pod \"community-operators-d4zmf\" (UID: \"acccae42-8133-475f-ad53-dbfa434e5e45\") " pod="openshift-marketplace/community-operators-d4zmf" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.695957 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm9l7\" (UniqueName: \"kubernetes.io/projected/acccae42-8133-475f-ad53-dbfa434e5e45-kube-api-access-qm9l7\") pod \"community-operators-d4zmf\" (UID: \"acccae42-8133-475f-ad53-dbfa434e5e45\") " pod="openshift-marketplace/community-operators-d4zmf" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.696001 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acccae42-8133-475f-ad53-dbfa434e5e45-catalog-content\") pod \"community-operators-d4zmf\" (UID: \"acccae42-8133-475f-ad53-dbfa434e5e45\") " pod="openshift-marketplace/community-operators-d4zmf" Nov 26 11:12:59 crc kubenswrapper[4622]: E1126 11:12:59.696162 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:00.196078021 +0000 UTC m=+139.787289543 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.752212 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dg7cl" podStartSLOduration=121.752194191 podStartE2EDuration="2m1.752194191s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:59.71779694 +0000 UTC m=+139.309008461" watchObservedRunningTime="2025-11-26 11:12:59.752194191 +0000 UTC m=+139.343405713" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.754155 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6j9ln"] Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.762079 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6j9ln" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.774640 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-p5szg" podStartSLOduration=121.77461813 podStartE2EDuration="2m1.77461813s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:59.758002998 +0000 UTC m=+139.349214521" watchObservedRunningTime="2025-11-26 11:12:59.77461813 +0000 UTC m=+139.365829652" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.775392 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6j9ln"] Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.797518 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.797589 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acccae42-8133-475f-ad53-dbfa434e5e45-utilities\") pod \"community-operators-d4zmf\" (UID: \"acccae42-8133-475f-ad53-dbfa434e5e45\") " pod="openshift-marketplace/community-operators-d4zmf" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.797661 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm9l7\" (UniqueName: \"kubernetes.io/projected/acccae42-8133-475f-ad53-dbfa434e5e45-kube-api-access-qm9l7\") pod \"community-operators-d4zmf\" (UID: \"acccae42-8133-475f-ad53-dbfa434e5e45\") " pod="openshift-marketplace/community-operators-d4zmf" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.797689 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acccae42-8133-475f-ad53-dbfa434e5e45-catalog-content\") pod \"community-operators-d4zmf\" (UID: \"acccae42-8133-475f-ad53-dbfa434e5e45\") " pod="openshift-marketplace/community-operators-d4zmf" Nov 26 11:12:59 crc kubenswrapper[4622]: E1126 11:12:59.797787 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:00.29776989 +0000 UTC m=+139.888981401 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.798136 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acccae42-8133-475f-ad53-dbfa434e5e45-catalog-content\") pod \"community-operators-d4zmf\" (UID: \"acccae42-8133-475f-ad53-dbfa434e5e45\") " pod="openshift-marketplace/community-operators-d4zmf" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.813184 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr" podStartSLOduration=121.813166965 podStartE2EDuration="2m1.813166965s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:59.806841254 +0000 UTC m=+139.398052766" watchObservedRunningTime="2025-11-26 11:12:59.813166965 +0000 UTC m=+139.404378478" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.815273 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acccae42-8133-475f-ad53-dbfa434e5e45-utilities\") pod \"community-operators-d4zmf\" (UID: \"acccae42-8133-475f-ad53-dbfa434e5e45\") " pod="openshift-marketplace/community-operators-d4zmf" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.882072 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-w7zs4" podStartSLOduration=121.88203133 podStartE2EDuration="2m1.88203133s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:59.881395792 +0000 UTC m=+139.472607324" watchObservedRunningTime="2025-11-26 11:12:59.88203133 +0000 UTC m=+139.473257109" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.908475 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.908976 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dc1d52e-ff99-4495-8eef-bc54bf430361-utilities\") pod \"certified-operators-6j9ln\" (UID: \"7dc1d52e-ff99-4495-8eef-bc54bf430361\") " pod="openshift-marketplace/certified-operators-6j9ln" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.909006 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpcqw\" (UniqueName: \"kubernetes.io/projected/7dc1d52e-ff99-4495-8eef-bc54bf430361-kube-api-access-tpcqw\") pod \"certified-operators-6j9ln\" (UID: 
\"7dc1d52e-ff99-4495-8eef-bc54bf430361\") " pod="openshift-marketplace/certified-operators-6j9ln" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.909101 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dc1d52e-ff99-4495-8eef-bc54bf430361-catalog-content\") pod \"certified-operators-6j9ln\" (UID: \"7dc1d52e-ff99-4495-8eef-bc54bf430361\") " pod="openshift-marketplace/certified-operators-6j9ln" Nov 26 11:12:59 crc kubenswrapper[4622]: E1126 11:12:59.909303 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:00.409289128 +0000 UTC m=+140.000500649 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.912578 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm9l7\" (UniqueName: \"kubernetes.io/projected/acccae42-8133-475f-ad53-dbfa434e5e45-kube-api-access-qm9l7\") pod \"community-operators-d4zmf\" (UID: \"acccae42-8133-475f-ad53-dbfa434e5e45\") " pod="openshift-marketplace/community-operators-d4zmf" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.913085 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6zjxz" podStartSLOduration=121.913070694 podStartE2EDuration="2m1.913070694s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:59.911769693 +0000 UTC m=+139.502981225" watchObservedRunningTime="2025-11-26 11:12:59.913070694 +0000 UTC m=+139.504282216" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.977001 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" podStartSLOduration=121.976980462 podStartE2EDuration="2m1.976980462s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:12:59.976759255 +0000 UTC m=+139.567970778" watchObservedRunningTime="2025-11-26 11:12:59.976980462 +0000 UTC m=+139.568191985" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.988762 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7zqqd"] Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.990437 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7zqqd" Nov 26 11:12:59 crc kubenswrapper[4622]: I1126 11:12:59.991575 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7zqqd"] Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.009948 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.010153 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dc1d52e-ff99-4495-8eef-bc54bf430361-catalog-content\") pod \"certified-operators-6j9ln\" (UID: \"7dc1d52e-ff99-4495-8eef-bc54bf430361\") " pod="openshift-marketplace/certified-operators-6j9ln" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.010252 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dc1d52e-ff99-4495-8eef-bc54bf430361-utilities\") pod \"certified-operators-6j9ln\" (UID: \"7dc1d52e-ff99-4495-8eef-bc54bf430361\") " pod="openshift-marketplace/certified-operators-6j9ln" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.010271 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpcqw\" (UniqueName: \"kubernetes.io/projected/7dc1d52e-ff99-4495-8eef-bc54bf430361-kube-api-access-tpcqw\") pod \"certified-operators-6j9ln\" (UID: \"7dc1d52e-ff99-4495-8eef-bc54bf430361\") " pod="openshift-marketplace/certified-operators-6j9ln" Nov 26 11:13:00 crc kubenswrapper[4622]: E1126 11:13:00.010751 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:00.510740563 +0000 UTC m=+140.101952086 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.011081 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dc1d52e-ff99-4495-8eef-bc54bf430361-catalog-content\") pod \"certified-operators-6j9ln\" (UID: \"7dc1d52e-ff99-4495-8eef-bc54bf430361\") " pod="openshift-marketplace/certified-operators-6j9ln" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.011298 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dc1d52e-ff99-4495-8eef-bc54bf430361-utilities\") pod \"certified-operators-6j9ln\" (UID: \"7dc1d52e-ff99-4495-8eef-bc54bf430361\") " pod="openshift-marketplace/certified-operators-6j9ln" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.038420 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpcqw\" (UniqueName: \"kubernetes.io/projected/7dc1d52e-ff99-4495-8eef-bc54bf430361-kube-api-access-tpcqw\") pod \"certified-operators-6j9ln\" (UID: \"7dc1d52e-ff99-4495-8eef-bc54bf430361\") " pod="openshift-marketplace/certified-operators-6j9ln" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.098597 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6j9ln" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.111966 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.112235 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps4zn\" (UniqueName: \"kubernetes.io/projected/af122252-a014-4afb-b751-bd4e53793b53-kube-api-access-ps4zn\") pod \"community-operators-7zqqd\" (UID: \"af122252-a014-4afb-b751-bd4e53793b53\") " pod="openshift-marketplace/community-operators-7zqqd" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.112260 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af122252-a014-4afb-b751-bd4e53793b53-utilities\") pod \"community-operators-7zqqd\" (UID: \"af122252-a014-4afb-b751-bd4e53793b53\") " pod="openshift-marketplace/community-operators-7zqqd" Nov 26 11:13:00 crc kubenswrapper[4622]: E1126 11:13:00.112317 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:00.612302938 +0000 UTC m=+140.203514460 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.112426 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af122252-a014-4afb-b751-bd4e53793b53-catalog-content\") pod \"community-operators-7zqqd\" (UID: \"af122252-a014-4afb-b751-bd4e53793b53\") " pod="openshift-marketplace/community-operators-7zqqd" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.112490 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:13:00 crc kubenswrapper[4622]: E1126 11:13:00.112952 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:00.612941141 +0000 UTC m=+140.204152663 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.120371 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bwskx" podStartSLOduration=122.120361164 podStartE2EDuration="2m2.120361164s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:00.113224364 +0000 UTC m=+139.704435886" watchObservedRunningTime="2025-11-26 11:13:00.120361164 +0000 UTC m=+139.711572686" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.121240 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-n8mbn" podStartSLOduration=6.121233808 podStartE2EDuration="6.121233808s" podCreationTimestamp="2025-11-26 11:12:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:00.07763749 +0000 UTC m=+139.668849013" watchObservedRunningTime="2025-11-26 11:13:00.121233808 +0000 UTC m=+139.712445330" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.199243 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d4zmf" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.200569 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-cwk8q" podStartSLOduration=122.200551553 podStartE2EDuration="2m2.200551553s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:00.16999589 +0000 UTC m=+139.761207413" watchObservedRunningTime="2025-11-26 11:13:00.200551553 +0000 UTC m=+139.791763075" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.201141 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9d7nf" podStartSLOduration=122.201135433 podStartE2EDuration="2m2.201135433s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:00.19939362 +0000 UTC m=+139.790605143" watchObservedRunningTime="2025-11-26 11:13:00.201135433 +0000 UTC m=+139.792346955" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.213276 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:13:00 crc kubenswrapper[4622]: E1126 11:13:00.218709 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:00.718674987 +0000 UTC m=+140.309886509 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.218765 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af122252-a014-4afb-b751-bd4e53793b53-catalog-content\") pod \"community-operators-7zqqd\" (UID: \"af122252-a014-4afb-b751-bd4e53793b53\") " pod="openshift-marketplace/community-operators-7zqqd" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.218797 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.218986 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps4zn\" (UniqueName: \"kubernetes.io/projected/af122252-a014-4afb-b751-bd4e53793b53-kube-api-access-ps4zn\") pod \"community-operators-7zqqd\" (UID: \"af122252-a014-4afb-b751-bd4e53793b53\") " pod="openshift-marketplace/community-operators-7zqqd" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.219004 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af122252-a014-4afb-b751-bd4e53793b53-utilities\") pod \"community-operators-7zqqd\" (UID: \"af122252-a014-4afb-b751-bd4e53793b53\") " pod="openshift-marketplace/community-operators-7zqqd" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.219346 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af122252-a014-4afb-b751-bd4e53793b53-utilities\") pod \"community-operators-7zqqd\" (UID: \"af122252-a014-4afb-b751-bd4e53793b53\") " pod="openshift-marketplace/community-operators-7zqqd" Nov 26 11:13:00 crc kubenswrapper[4622]: E1126 11:13:00.219582 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:00.719575063 +0000 UTC m=+140.310786584 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.219588 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af122252-a014-4afb-b751-bd4e53793b53-catalog-content\") pod \"community-operators-7zqqd\" (UID: \"af122252-a014-4afb-b751-bd4e53793b53\") " pod="openshift-marketplace/community-operators-7zqqd" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.236228 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" podStartSLOduration=122.236212537 podStartE2EDuration="2m2.236212537s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:00.232534375 +0000 UTC m=+139.823745897" watchObservedRunningTime="2025-11-26 11:13:00.236212537 +0000 UTC m=+139.827424058" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.278980 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps4zn\" (UniqueName: \"kubernetes.io/projected/af122252-a014-4afb-b751-bd4e53793b53-kube-api-access-ps4zn\") pod \"community-operators-7zqqd\" (UID: \"af122252-a014-4afb-b751-bd4e53793b53\") " pod="openshift-marketplace/community-operators-7zqqd" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.319438 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:13:00 crc kubenswrapper[4622]: E1126 11:13:00.319901 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:00.819885779 +0000 UTC m=+140.411097300 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.351134 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7zqqd" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.356193 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" podStartSLOduration=122.356178633 podStartE2EDuration="2m2.356178633s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:00.353281774 +0000 UTC m=+139.944493296" watchObservedRunningTime="2025-11-26 11:13:00.356178633 +0000 UTC m=+139.947390155" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.427325 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:13:00 crc kubenswrapper[4622]: E1126 11:13:00.427722 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:00.927711056 +0000 UTC m=+140.518922578 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.433412 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-r2tzg" podStartSLOduration=122.433394145 podStartE2EDuration="2m2.433394145s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:00.428154802 +0000 UTC m=+140.019366324" watchObservedRunningTime="2025-11-26 11:13:00.433394145 +0000 UTC m=+140.024605668" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.530682 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:13:00 crc kubenswrapper[4622]: E1126 11:13:00.530823 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:01.030802102 +0000 UTC m=+140.622013624 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.531194 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:13:00 crc kubenswrapper[4622]: E1126 11:13:00.531588 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:01.031576059 +0000 UTC m=+140.622787582 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.632674 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:13:00 crc kubenswrapper[4622]: E1126 11:13:00.633265 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:01.13324712 +0000 UTC m=+140.724458642 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.634265 4622 patch_prober.go:28] interesting pod/router-default-5444994796-w7zs4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 26 11:13:00 crc kubenswrapper[4622]: [-]has-synced failed: reason withheld Nov 26 11:13:00 crc kubenswrapper[4622]: [+]process-running ok Nov 26 11:13:00 crc kubenswrapper[4622]: healthz check failed Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.634303 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-w7zs4" podUID="1e710e37-8d2a-43f6-9e8f-7eb3233eb276" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.648363 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" event={"ID":"8ef8cb79-5fb6-4438-bc49-4a88a304557d","Type":"ContainerStarted","Data":"77e861a084991d831c859bc74c99521cb75b3e9f996d18f9b046af9c288a3d0b"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.649639 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tq627" event={"ID":"ca03ec70-790c-40c8-a009-dfa4946f6d06","Type":"ContainerStarted","Data":"b662411d52cc29159b0dcea79234cee7a625fd4e4d3b726e68dad2aca6a7a7d4"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.649658 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tq627" event={"ID":"ca03ec70-790c-40c8-a009-dfa4946f6d06","Type":"ContainerStarted","Data":"255dd9019c0d4df2dcc23b7dba96cb6a700d6dfe989f21f7f3e0bf995b923711"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.660527 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" event={"ID":"e202504f-0b8b-4709-b211-031ebf81cc77","Type":"ContainerStarted","Data":"aff7eb97599fd11302031f9fd7af920a6acae993666877e040309b3741d76b7f"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.662170 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" event={"ID":"f8bc3e2f-5cfa-49da-863d-e8c611605186","Type":"ContainerStarted","Data":"cd757269bd0cefde1b0b41c0a42e59287689a302bc3e031bf77d14ebefd648bf"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.663449 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" event={"ID":"ac45a607-6d21-475e-8f82-ed9bbcbb1f65","Type":"ContainerStarted","Data":"bbebc6997db78ce990a62a4c7a91b632916c0f0c5ed3d465049f1e1bec9e70bc"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.663467 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" 
event={"ID":"ac45a607-6d21-475e-8f82-ed9bbcbb1f65","Type":"ContainerStarted","Data":"9b0dca04db8afda646d4197a9a650aee206bc0359e9cd6eb6fc463c87069afa0"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.696082 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" event={"ID":"af17650f-233a-479a-9152-082ca02f5cbb","Type":"ContainerStarted","Data":"67fd57112ca3793f4454906125f13f2a47dae77a9b6c003adc4191a68dd17e6c"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.700029 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-4x7nf" event={"ID":"731a82fb-93f9-42df-8dc3-7e0e0937b5a9","Type":"ContainerStarted","Data":"8054a7c04f48ae90b44d2989ad9894cb6e23697cdd14f554dfa011f13a91a7b0"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.734067 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:13:00 crc kubenswrapper[4622]: E1126 11:13:00.735806 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:01.235796094 +0000 UTC m=+140.827007616 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.738604 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tq627" podStartSLOduration=122.738589077 podStartE2EDuration="2m2.738589077s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:00.722231651 +0000 UTC m=+140.313443173" watchObservedRunningTime="2025-11-26 11:13:00.738589077 +0000 UTC m=+140.329800599" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.798460 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l97bl"] Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.802822 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6j9ln"] Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.834904 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:13:00 crc kubenswrapper[4622]: E1126 11:13:00.836071 4622 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:01.336057437 +0000 UTC m=+140.927268959 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.853469 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" event={"ID":"d6415e96-60f9-4d4a-8476-c39c404ba62d","Type":"ContainerStarted","Data":"4d6b428dd9b918367202c02cd7d59eb9d5e9892d8689a3f10b79328aa6674f84"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.853521 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" event={"ID":"d6415e96-60f9-4d4a-8476-c39c404ba62d","Type":"ContainerStarted","Data":"5e71a26490f0939de227bb63ff7b686005a79b02dd2e1d9831d75960687daa2f"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.888239 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.900754 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-98snv" event={"ID":"2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b","Type":"ContainerStarted","Data":"c078510522a708629082168cc9535d62111f0214715164570d2dcd10d3d54016"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.900810 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-98snv" event={"ID":"2a6ab38e-e6a5-4f40-9535-aaf0eb2bb01b","Type":"ContainerStarted","Data":"9d42369a399ec2c591f27f44a157ef8330504fa71b78873639137c9f4d731d52"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.928352 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" event={"ID":"867b2e21-3905-4d08-b96c-e23c8240d93d","Type":"ContainerStarted","Data":"d0e0b24eaef92e3c25620a41ee6ed79feccb564c873cca9cd697bd2a7d768121"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.930072 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.936457 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:13:00 crc kubenswrapper[4622]: E1126 11:13:00.937470 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" 
failed. No retries permitted until 2025-11-26 11:13:01.437458879 +0000 UTC m=+141.028670400 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.940851 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.944679 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" event={"ID":"4f4e9e05-70fe-443d-9747-ea0849db83d4","Type":"ContainerStarted","Data":"783faa42d88e8aff5fc1e280139b7b6409cdfdcd842f4a328998e4469f07450c"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.944723 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" event={"ID":"4f4e9e05-70fe-443d-9747-ea0849db83d4","Type":"ContainerStarted","Data":"c6c76a2961fdabfba09ddcaf811c4be543ad81e2065c3d2af702fdb2c81bdbac"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.944734 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" event={"ID":"4f4e9e05-70fe-443d-9747-ea0849db83d4","Type":"ContainerStarted","Data":"eb019955c96174ff77a3af422e766435860a5c04e4a60a2258b5f5ca15eabee5"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.975228 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d4zmf"] Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.995006 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z" event={"ID":"83f652a7-5618-40ee-8196-ddd4adba4175","Type":"ContainerStarted","Data":"33934ae5560a74eb4edd5e7c7b28346d7f6ce60aaf5be3249653494e55f34ec8"} Nov 26 11:13:00 crc kubenswrapper[4622]: I1126 11:13:00.995656 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z" Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.020710 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9vzz6" event={"ID":"cee0e42c-d907-4529-892f-ee830f463490","Type":"ContainerStarted","Data":"138a2e8dd70a3e7204ee8b629f19dfb46e8fb8967e3bfd834fa8882d1b0cfd7f"} Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.020778 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9vzz6" event={"ID":"cee0e42c-d907-4529-892f-ee830f463490","Type":"ContainerStarted","Data":"6ef7f1374cfa2b3671502cfd1df3772caaf5861b645ac71a13fad30f95e0e329"} Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.039163 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" 
(UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 26 11:13:01 crc kubenswrapper[4622]: E1126 11:13:01.040605 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:01.540587424 +0000 UTC m=+141.131798946 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.043765 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7zqqd"] Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.061847 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-5lz6m" event={"ID":"63b1178e-6e77-4714-96eb-7dc978789aaa","Type":"ContainerStarted","Data":"d154dbeac7bf5bbee4fa4d1fae8cb30b3cc726e971d369fab631c576ceaf8409"} Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.061888 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-5lz6m" event={"ID":"63b1178e-6e77-4714-96eb-7dc978789aaa","Type":"ContainerStarted","Data":"6947b689bed670826bd5af9c511b8bcef1658c09b2ac4fe1104df39e7c88dc37"} Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.062568 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-5lz6m" Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.063149 4622 patch_prober.go:28] interesting pod/downloads-7954f5f757-5lz6m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.063195 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5lz6m" podUID="63b1178e-6e77-4714-96eb-7dc978789aaa" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.084984 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-swz8p" event={"ID":"b144a741-5958-497e-9b68-8e569bfca541","Type":"ContainerStarted","Data":"5e2a1885c0cb7d451e62fed21ae1b6418a4cb967e59052e88d06cf3f71755cc0"} Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.103611 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl" event={"ID":"fda35c7d-d0bc-4730-b21d-a66ff3b0f62e","Type":"ContainerStarted","Data":"84f6653a2cf26cad9e9192144cf5dee0ea34305ac5a2f3e037a42b33a31bf47c"} Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.141186 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:13:01 crc kubenswrapper[4622]: E1126 11:13:01.142136 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:01.642119322 +0000 UTC m=+141.233330844 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.142928 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" event={"ID":"3db268a7-30fd-4031-8eba-72d60056bbbd","Type":"ContainerStarted","Data":"29f854cf4a79e1554349fbd2049c45246cbbf0e1e92a7178c7233ed0237edd30"} Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.173902 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" event={"ID":"b4df4382-b969-46c2-b5f7-61631a2bfc06","Type":"ContainerStarted","Data":"c48fdd086a83e4273a620f9b8469d3b482328814023eb115c5881308761a74b3"} Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.173954 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" event={"ID":"b4df4382-b969-46c2-b5f7-61631a2bfc06","Type":"ContainerStarted","Data":"fc019267af8d0e08847ead4242a54318409ffe84bcd961cad3a8621207a85fb8"} Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.177622 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" event={"ID":"7dae3565-6d26-4f06-a148-66466f9ec9bb","Type":"ContainerStarted","Data":"52136ee298bb8f1b93d7a9f32fb1a39e2b52a8e4bed1ec20f1258f85b6ee4520"} Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.177650 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" event={"ID":"7dae3565-6d26-4f06-a148-66466f9ec9bb","Type":"ContainerStarted","Data":"217b710a1d8e60de11c1c55cecb6bdc6535cec025791a7a66a4b2114c6b67c0c"} Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.179107 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.202280 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" event={"ID":"05f00222-b862-4024-a903-c006f34852fb","Type":"ContainerStarted","Data":"a7ca60b6686b82aa94c735033253c7c6ba463c9c4ff9ac0e7b56b3349b462109"} Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.210077 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4m9s2" 
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.210124 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-trgkr"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.220781 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-cwk8q"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.221057 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-g675c"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.246025 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 26 11:13:01 crc kubenswrapper[4622]: E1126 11:13:01.247014 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:01.746998267 +0000 UTC m=+141.338209789 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.297223 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-vprnt" podStartSLOduration=123.297207506 podStartE2EDuration="2m3.297207506s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:01.295797148 +0000 UTC m=+140.887008670" watchObservedRunningTime="2025-11-26 11:13:01.297207506 +0000 UTC m=+140.888419028"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.348105 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
Nov 26 11:13:01 crc kubenswrapper[4622]: E1126 11:13:01.351779 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:01.851763808 +0000 UTC m=+141.442975330 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.417167 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx" podStartSLOduration=123.417141371 podStartE2EDuration="2m3.417141371s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:01.33981518 +0000 UTC m=+140.931026702" watchObservedRunningTime="2025-11-26 11:13:01.417141371 +0000 UTC m=+141.008352894"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.418687 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5bd7z"]
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.431981 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.461602 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.461618 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5bd7z"]
Nov 26 11:13:01 crc kubenswrapper[4622]: E1126 11:13:01.462740 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:01.962720266 +0000 UTC m=+141.553931788 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.467955 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Nov 26 11:13:01 crc kubenswrapper[4622]: E1126 11:13:01.468708 4622 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb71a6300_1cfd_4eb0_b75c_7231184cfe79.slice/crio-conmon-60fa255d3790d33ec2e976f3c8061e3bb579fe4ed151d0ab79963b28bca05abf.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7dc1d52e_ff99_4495_8eef_bc54bf430361.slice/crio-conmon-de871602004c640ad832e087c366a08e2d7973179e955fd224dd12bfa18647ae.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7dc1d52e_ff99_4495_8eef_bc54bf430361.slice/crio-de871602004c640ad832e087c366a08e2d7973179e955fd224dd12bfa18647ae.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf122252_a014_4afb_b751_bd4e53793b53.slice/crio-conmon-7c9981dce6ff9dbf5a860be805628bd0e4e8ae874942e2a01b121defd71b63e2.scope\": RecentStats: unable to find data in memory cache]"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.483941 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-69qr9" podStartSLOduration=123.483928853 podStartE2EDuration="2m3.483928853s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:01.480824142 +0000 UTC m=+141.072035664" watchObservedRunningTime="2025-11-26 11:13:01.483928853 +0000 UTC m=+141.075140375"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.567323 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv" podStartSLOduration=123.567305817 podStartE2EDuration="2m3.567305817s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:01.538773287 +0000 UTC m=+141.129984809" watchObservedRunningTime="2025-11-26 11:13:01.567305817 +0000 UTC m=+141.158517339"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.568559 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" podStartSLOduration=122.568553358 podStartE2EDuration="2m2.568553358s" podCreationTimestamp="2025-11-26 11:10:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:01.567751798 +0000 UTC m=+141.158963320" watchObservedRunningTime="2025-11-26 11:13:01.568553358 +0000 UTC m=+141.159764881"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.568697 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8snr\" (UniqueName: \"kubernetes.io/projected/a6824d35-eddb-4fb5-821d-0a82184fbe45-kube-api-access-d8snr\") pod \"redhat-marketplace-5bd7z\" (UID: \"a6824d35-eddb-4fb5-821d-0a82184fbe45\") " pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.568750 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6824d35-eddb-4fb5-821d-0a82184fbe45-utilities\") pod \"redhat-marketplace-5bd7z\" (UID: \"a6824d35-eddb-4fb5-821d-0a82184fbe45\") " pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.568844 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.568890 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6824d35-eddb-4fb5-821d-0a82184fbe45-catalog-content\") pod \"redhat-marketplace-5bd7z\" (UID: \"a6824d35-eddb-4fb5-821d-0a82184fbe45\") " pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:13:01 crc kubenswrapper[4622]: E1126 11:13:01.569241 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:02.06922805 +0000 UTC m=+141.660439571 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.602782 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z" podStartSLOduration=123.602760421 podStartE2EDuration="2m3.602760421s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:01.599103 +0000 UTC m=+141.190314522" watchObservedRunningTime="2025-11-26 11:13:01.602760421 +0000 UTC m=+141.193971944"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.634633 4622 patch_prober.go:28] interesting pod/router-default-5444994796-w7zs4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 26 11:13:01 crc kubenswrapper[4622]: [-]has-synced failed: reason withheld
Nov 26 11:13:01 crc kubenswrapper[4622]: [+]process-running ok
Nov 26 11:13:01 crc kubenswrapper[4622]: healthz check failed
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.634915 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-w7zs4" podUID="1e710e37-8d2a-43f6-9e8f-7eb3233eb276" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.642787 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-98snv" podStartSLOduration=122.642772384 podStartE2EDuration="2m2.642772384s" podCreationTimestamp="2025-11-26 11:10:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:01.63971379 +0000 UTC m=+141.230925312" watchObservedRunningTime="2025-11-26 11:13:01.642772384 +0000 UTC m=+141.233983897"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.672135 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 26 11:13:01 crc kubenswrapper[4622]: E1126 11:13:01.672617 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:02.172603381 +0000 UTC m=+141.763814903 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.672849 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6824d35-eddb-4fb5-821d-0a82184fbe45-utilities\") pod \"redhat-marketplace-5bd7z\" (UID: \"a6824d35-eddb-4fb5-821d-0a82184fbe45\") " pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.673009 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.673125 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6824d35-eddb-4fb5-821d-0a82184fbe45-catalog-content\") pod \"redhat-marketplace-5bd7z\" (UID: \"a6824d35-eddb-4fb5-821d-0a82184fbe45\") " pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.673271 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8snr\" (UniqueName: \"kubernetes.io/projected/a6824d35-eddb-4fb5-821d-0a82184fbe45-kube-api-access-d8snr\") pod \"redhat-marketplace-5bd7z\" (UID: \"a6824d35-eddb-4fb5-821d-0a82184fbe45\") " pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.674209 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6824d35-eddb-4fb5-821d-0a82184fbe45-utilities\") pod \"redhat-marketplace-5bd7z\" (UID: \"a6824d35-eddb-4fb5-821d-0a82184fbe45\") " pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:13:01 crc kubenswrapper[4622]: E1126 11:13:01.674541 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:02.174520343 +0000 UTC m=+141.765731865 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.674955 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6824d35-eddb-4fb5-821d-0a82184fbe45-catalog-content\") pod \"redhat-marketplace-5bd7z\" (UID: \"a6824d35-eddb-4fb5-821d-0a82184fbe45\") " pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.705372 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8snr\" (UniqueName: \"kubernetes.io/projected/a6824d35-eddb-4fb5-821d-0a82184fbe45-kube-api-access-d8snr\") pod \"redhat-marketplace-5bd7z\" (UID: \"a6824d35-eddb-4fb5-821d-0a82184fbe45\") " pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.729393 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-645bl" podStartSLOduration=123.729378283 podStartE2EDuration="2m3.729378283s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:01.727894347 +0000 UTC m=+141.319105870" watchObservedRunningTime="2025-11-26 11:13:01.729378283 +0000 UTC m=+141.320589806"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.756851 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xcvpx"]
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.757906 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xcvpx"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.769074 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-5lz6m" podStartSLOduration=123.769061838 podStartE2EDuration="2m3.769061838s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:01.768301825 +0000 UTC m=+141.359513347" watchObservedRunningTime="2025-11-26 11:13:01.769061838 +0000 UTC m=+141.360273359"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.774322 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 26 11:13:01 crc kubenswrapper[4622]: E1126 11:13:01.774783 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:02.274765485 +0000 UTC m=+141.865977008 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.780730 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcvpx"]
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.808826 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" podStartSLOduration=123.808803821 podStartE2EDuration="2m3.808803821s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:01.808683916 +0000 UTC m=+141.399895438" watchObservedRunningTime="2025-11-26 11:13:01.808803821 +0000 UTC m=+141.400015344"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.821759 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.828724 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-w4wmd" podStartSLOduration=123.828704554 podStartE2EDuration="2m3.828704554s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:01.827330124 +0000 UTC m=+141.418541645" watchObservedRunningTime="2025-11-26 11:13:01.828704554 +0000 UTC m=+141.419916076"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.861811 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-swz8p" podStartSLOduration=7.861790805 podStartE2EDuration="7.861790805s" podCreationTimestamp="2025-11-26 11:12:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:01.860360541 +0000 UTC m=+141.451572063" watchObservedRunningTime="2025-11-26 11:13:01.861790805 +0000 UTC m=+141.453002328"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.869131 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.869264 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.876874 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.876986 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b268297-0c05-4b22-acfd-a2dc214d44d3-utilities\") pod \"redhat-marketplace-xcvpx\" (UID: \"9b268297-0c05-4b22-acfd-a2dc214d44d3\") " pod="openshift-marketplace/redhat-marketplace-xcvpx"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.877053 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hf8d\" (UniqueName: \"kubernetes.io/projected/9b268297-0c05-4b22-acfd-a2dc214d44d3-kube-api-access-5hf8d\") pod \"redhat-marketplace-xcvpx\" (UID: \"9b268297-0c05-4b22-acfd-a2dc214d44d3\") " pod="openshift-marketplace/redhat-marketplace-xcvpx"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.877190 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b268297-0c05-4b22-acfd-a2dc214d44d3-catalog-content\") pod \"redhat-marketplace-xcvpx\" (UID: \"9b268297-0c05-4b22-acfd-a2dc214d44d3\") " pod="openshift-marketplace/redhat-marketplace-xcvpx"
Nov 26 11:13:01 crc kubenswrapper[4622]: E1126 11:13:01.877561 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:02.377549463 +0000 UTC m=+141.968760985 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.881884 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.890073 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-p78g4" podStartSLOduration=122.890055139 podStartE2EDuration="2m2.890055139s" podCreationTimestamp="2025-11-26 11:10:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:01.88909491 +0000 UTC m=+141.480306432" watchObservedRunningTime="2025-11-26 11:13:01.890055139 +0000 UTC m=+141.481266662"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.981628 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.982088 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b268297-0c05-4b22-acfd-a2dc214d44d3-catalog-content\") pod \"redhat-marketplace-xcvpx\" (UID: \"9b268297-0c05-4b22-acfd-a2dc214d44d3\") " pod="openshift-marketplace/redhat-marketplace-xcvpx"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.982173 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b268297-0c05-4b22-acfd-a2dc214d44d3-utilities\") pod \"redhat-marketplace-xcvpx\" (UID: \"9b268297-0c05-4b22-acfd-a2dc214d44d3\") " pod="openshift-marketplace/redhat-marketplace-xcvpx"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.982191 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hf8d\" (UniqueName: \"kubernetes.io/projected/9b268297-0c05-4b22-acfd-a2dc214d44d3-kube-api-access-5hf8d\") pod \"redhat-marketplace-xcvpx\" (UID: \"9b268297-0c05-4b22-acfd-a2dc214d44d3\") " pod="openshift-marketplace/redhat-marketplace-xcvpx"
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.982871 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b268297-0c05-4b22-acfd-a2dc214d44d3-utilities\") pod \"redhat-marketplace-xcvpx\" (UID: \"9b268297-0c05-4b22-acfd-a2dc214d44d3\") " pod="openshift-marketplace/redhat-marketplace-xcvpx"
Nov 26 11:13:01 crc kubenswrapper[4622]: E1126 11:13:01.982945 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:02.482931295 +0000 UTC m=+142.074142817 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:01 crc kubenswrapper[4622]: I1126 11:13:01.982959 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b268297-0c05-4b22-acfd-a2dc214d44d3-catalog-content\") pod \"redhat-marketplace-xcvpx\" (UID: \"9b268297-0c05-4b22-acfd-a2dc214d44d3\") " pod="openshift-marketplace/redhat-marketplace-xcvpx"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.012330 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hf8d\" (UniqueName: \"kubernetes.io/projected/9b268297-0c05-4b22-acfd-a2dc214d44d3-kube-api-access-5hf8d\") pod \"redhat-marketplace-xcvpx\" (UID: \"9b268297-0c05-4b22-acfd-a2dc214d44d3\") " pod="openshift-marketplace/redhat-marketplace-xcvpx"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.080837 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xcvpx"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.109234 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
Nov 26 11:13:02 crc kubenswrapper[4622]: E1126 11:13:02.109680 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:02.609654395 +0000 UTC m=+142.200865917 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.150690 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x7ccv"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.214766 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 26 11:13:02 crc kubenswrapper[4622]: E1126 11:13:02.215228 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:02.715214033 +0000 UTC m=+142.306425555 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.266116 4622 generic.go:334] "Generic (PLEG): container finished" podID="b71a6300-1cfd-4eb0-b75c-7231184cfe79" containerID="60fa255d3790d33ec2e976f3c8061e3bb579fe4ed151d0ab79963b28bca05abf" exitCode=0
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.266199 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l97bl" event={"ID":"b71a6300-1cfd-4eb0-b75c-7231184cfe79","Type":"ContainerDied","Data":"60fa255d3790d33ec2e976f3c8061e3bb579fe4ed151d0ab79963b28bca05abf"}
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.266230 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l97bl" event={"ID":"b71a6300-1cfd-4eb0-b75c-7231184cfe79","Type":"ContainerStarted","Data":"aee024512351b8be977aab42420a79e3ef0b5d6c770e0525d943302d4645e59f"}
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.272994 4622 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.287519 4622 generic.go:334] "Generic (PLEG): container finished" podID="7dc1d52e-ff99-4495-8eef-bc54bf430361" containerID="de871602004c640ad832e087c366a08e2d7973179e955fd224dd12bfa18647ae" exitCode=0
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.287623 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6j9ln" event={"ID":"7dc1d52e-ff99-4495-8eef-bc54bf430361","Type":"ContainerDied","Data":"de871602004c640ad832e087c366a08e2d7973179e955fd224dd12bfa18647ae"}
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.287682 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6j9ln" event={"ID":"7dc1d52e-ff99-4495-8eef-bc54bf430361","Type":"ContainerStarted","Data":"8dd8ac5dd9db73e2ed5b3a434583d74b5983698f2cbfc3ca579ec4ee0a33c00d"}
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.316199 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
Nov 26 11:13:02 crc kubenswrapper[4622]: E1126 11:13:02.316666 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:02.816652925 +0000 UTC m=+142.407864437 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.324946 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" event={"ID":"e202504f-0b8b-4709-b211-031ebf81cc77","Type":"ContainerStarted","Data":"7f0c1ff8fe1029c681cd1a99de3cb6888f3abfdfa8cf63646f87efe0d29f7746"}
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.347869 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9vzz6" event={"ID":"cee0e42c-d907-4529-892f-ee830f463490","Type":"ContainerStarted","Data":"7a1b35092e73ec517f519d3e425c5b6d9bea960da16748cbd884a907560d2292"}
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.348615 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-9vzz6"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.374092 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" event={"ID":"8ef8cb79-5fb6-4438-bc49-4a88a304557d","Type":"ContainerStarted","Data":"62e5a965bc09559059c2419691c94c74fc1cb71820116dd2b612b184633fa3ed"}
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.399794 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-9vzz6" podStartSLOduration=8.399776312 podStartE2EDuration="8.399776312s" podCreationTimestamp="2025-11-26 11:12:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:02.390891238 +0000 UTC m=+141.982102759" watchObservedRunningTime="2025-11-26 11:13:02.399776312 +0000 UTC m=+141.990987834"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.410921 4622 generic.go:334] "Generic (PLEG): container finished" podID="acccae42-8133-475f-ad53-dbfa434e5e45" containerID="e6f92b0524cf343968e30d6e6127225d31b5b8d2a50fe8093b4f87d78fea114c" exitCode=0
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.411267 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d4zmf" event={"ID":"acccae42-8133-475f-ad53-dbfa434e5e45","Type":"ContainerDied","Data":"e6f92b0524cf343968e30d6e6127225d31b5b8d2a50fe8093b4f87d78fea114c"}
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.411299 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d4zmf" event={"ID":"acccae42-8133-475f-ad53-dbfa434e5e45","Type":"ContainerStarted","Data":"1f3cb3394fa16af51bbaefdabe25f42a7d2d3648983a1231f428305d92af1db4"}
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.417614 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 26 11:13:02 crc kubenswrapper[4622]: E1126 11:13:02.417725 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:02.917710459 +0000 UTC m=+142.508921980 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.418078 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
Nov 26 11:13:02 crc kubenswrapper[4622]: E1126 11:13:02.420575 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:02.920562182 +0000 UTC m=+142.511773705 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.424306 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-8g8bw"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.424752 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-8g8bw"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.435599 4622 generic.go:334] "Generic (PLEG): container finished" podID="af122252-a014-4afb-b751-bd4e53793b53" containerID="7c9981dce6ff9dbf5a860be805628bd0e4e8ae874942e2a01b121defd71b63e2" exitCode=0
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.436266 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7zqqd" event={"ID":"af122252-a014-4afb-b751-bd4e53793b53","Type":"ContainerDied","Data":"7c9981dce6ff9dbf5a860be805628bd0e4e8ae874942e2a01b121defd71b63e2"}
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.436299 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7zqqd" event={"ID":"af122252-a014-4afb-b751-bd4e53793b53","Type":"ContainerStarted","Data":"8ccf4938f8a0eec88b77a247d0f1234adf0bf90818f88af9150276532b8d3f30"}
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.437069 4622 patch_prober.go:28] interesting pod/downloads-7954f5f757-5lz6m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body=
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.437100 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5lz6m" podUID="63b1178e-6e77-4714-96eb-7dc978789aaa" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.447903 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dscwx"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.455910 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" podStartSLOduration=124.45589599 podStartE2EDuration="2m4.45589599s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:02.424768239 +0000 UTC m=+142.015979760" watchObservedRunningTime="2025-11-26 11:13:02.45589599 +0000 UTC m=+142.047107522"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.521032 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 26 11:13:02 crc kubenswrapper[4622]: E1126 11:13:02.523890 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:03.023873774 +0000 UTC m=+142.615085296 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.561429 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5bd7z"]
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.605433 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wnxq7"]
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.623880 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wnxq7"]
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.624256 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wnxq7"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.624959 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
Nov 26 11:13:02 crc kubenswrapper[4622]: E1126 11:13:02.625322 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:03.125305302 +0000 UTC m=+142.716516824 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.627993 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.631820 4622 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.653646 4622 patch_prober.go:28] interesting pod/router-default-5444994796-w7zs4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 26 11:13:02 crc kubenswrapper[4622]: [-]has-synced failed: reason withheld
Nov 26 11:13:02 crc kubenswrapper[4622]: [+]process-running ok
Nov 26 11:13:02 crc kubenswrapper[4622]: healthz check failed
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.653733 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-w7zs4" podUID="1e710e37-8d2a-43f6-9e8f-7eb3233eb276" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.693464 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcvpx"]
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.726258 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.726692 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-catalog-content\") pod \"redhat-operators-wnxq7\" (UID: \"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6\") " pod="openshift-marketplace/redhat-operators-wnxq7"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.726786 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcjsh\" (UniqueName: \"kubernetes.io/projected/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-kube-api-access-dcjsh\") pod \"redhat-operators-wnxq7\" (UID: \"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6\") " pod="openshift-marketplace/redhat-operators-wnxq7"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.726881 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-utilities\") pod \"redhat-operators-wnxq7\" (UID: \"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6\") " pod="openshift-marketplace/redhat-operators-wnxq7"
Nov 26 11:13:02 crc kubenswrapper[4622]: E1126 11:13:02.727028 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-26 11:13:03.227013061 +0000 UTC m=+142.818224583 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.828092 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-utilities\") pod \"redhat-operators-wnxq7\" (UID: \"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6\") " pod="openshift-marketplace/redhat-operators-wnxq7"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.828194 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-catalog-content\") pod \"redhat-operators-wnxq7\" (UID: \"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6\") " pod="openshift-marketplace/redhat-operators-wnxq7"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.828240 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.828276 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcjsh\" (UniqueName: \"kubernetes.io/projected/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-kube-api-access-dcjsh\") pod \"redhat-operators-wnxq7\" (UID: \"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6\") " pod="openshift-marketplace/redhat-operators-wnxq7"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.829164 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-utilities\") pod \"redhat-operators-wnxq7\" (UID: \"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6\") " pod="openshift-marketplace/redhat-operators-wnxq7"
Nov 26 11:13:02 crc kubenswrapper[4622]: E1126 11:13:02.829434 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-26 11:13:03.329423643 +0000 UTC m=+142.920635165 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqt6v" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.829430 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-catalog-content\") pod \"redhat-operators-wnxq7\" (UID: \"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6\") " pod="openshift-marketplace/redhat-operators-wnxq7"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.845216 4622 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-11-26T11:13:02.631844074Z","Handler":null,"Name":""}
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.879862 4622 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.879923 4622 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.889448 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcjsh\" (UniqueName: \"kubernetes.io/projected/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-kube-api-access-dcjsh\") pod \"redhat-operators-wnxq7\" (UID: \"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6\") " pod="openshift-marketplace/redhat-operators-wnxq7"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.929432 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.956523 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ts6wt"]
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.957060 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.957449 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ts6wt"
Nov 26 11:13:02 crc kubenswrapper[4622]: I1126 11:13:02.974954 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ts6wt"]
Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.013412 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wnxq7"
Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.032128 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-catalog-content\") pod \"redhat-operators-ts6wt\" (UID: \"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f\") " pod="openshift-marketplace/redhat-operators-ts6wt"
Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.032228 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.032258 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqcns\" (UniqueName: \"kubernetes.io/projected/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-kube-api-access-rqcns\") pod \"redhat-operators-ts6wt\" (UID: \"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f\") " pod="openshift-marketplace/redhat-operators-ts6wt"
Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.032273 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-utilities\") pod \"redhat-operators-ts6wt\" (UID: \"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f\") " pod="openshift-marketplace/redhat-operators-ts6wt"
Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.081553 4622 patch_prober.go:28] interesting pod/apiserver-76f77b778f-8g8bw container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Nov 26 11:13:03 crc kubenswrapper[4622]: [+]log ok
Nov 26 11:13:03 crc kubenswrapper[4622]: [+]etcd ok
Nov 26 11:13:03 crc kubenswrapper[4622]: [+]poststarthook/start-apiserver-admission-initializer ok
Nov 26 11:13:03 crc kubenswrapper[4622]: [+]poststarthook/generic-apiserver-start-informers ok
Nov 26 11:13:03 crc kubenswrapper[4622]: [+]poststarthook/max-in-flight-filter ok
Nov 26 11:13:03 crc kubenswrapper[4622]: [+]poststarthook/storage-object-count-tracker-hook ok
Nov 26 11:13:03 crc kubenswrapper[4622]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Nov 26 11:13:03 crc kubenswrapper[4622]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Nov 26 11:13:03 crc kubenswrapper[4622]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Nov 26 11:13:03 crc kubenswrapper[4622]: [+]poststarthook/project.openshift.io-projectcache ok
Nov 26 11:13:03 crc kubenswrapper[4622]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Nov 26 11:13:03 crc kubenswrapper[4622]: [+]poststarthook/openshift.io-startinformers ok
Nov 26 11:13:03 crc kubenswrapper[4622]: [+]poststarthook/openshift.io-restmapperupdater ok
Nov 26 11:13:03 crc kubenswrapper[4622]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Nov 26 11:13:03 crc kubenswrapper[4622]: livez check failed
Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.081602 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" podUID="e202504f-0b8b-4709-b211-031ebf81cc77" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.097257 4622 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.097295 4622 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.135453 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-catalog-content\") pod \"redhat-operators-ts6wt\" (UID: \"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f\") " pod="openshift-marketplace/redhat-operators-ts6wt"
Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.135574 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqcns\" (UniqueName: \"kubernetes.io/projected/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-kube-api-access-rqcns\") pod \"redhat-operators-ts6wt\" (UID: \"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f\") " pod="openshift-marketplace/redhat-operators-ts6wt"
Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.135593 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-utilities\") pod \"redhat-operators-ts6wt\" (UID: \"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f\") " pod="openshift-marketplace/redhat-operators-ts6wt"
Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.136063 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-utilities\") pod \"redhat-operators-ts6wt\" (UID: \"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f\") " pod="openshift-marketplace/redhat-operators-ts6wt"
Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.136272 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-catalog-content\") pod \"redhat-operators-ts6wt\" (UID: \"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f\") "
pod="openshift-marketplace/redhat-operators-ts6wt" Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.163215 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqcns\" (UniqueName: \"kubernetes.io/projected/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-kube-api-access-rqcns\") pod \"redhat-operators-ts6wt\" (UID: \"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f\") " pod="openshift-marketplace/redhat-operators-ts6wt" Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.176429 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqt6v\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.188875 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.380842 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ts6wt" Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.417842 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wnxq7"] Nov 26 11:13:03 crc kubenswrapper[4622]: W1126 11:13:03.461245 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31f8dcbc_2fc0_4edd_abf1_2f2aa24a89a6.slice/crio-e3fc48db502f84104bd11d797f351eb723d4c3018ab1c77e205177ecac01d63d WatchSource:0}: Error finding container e3fc48db502f84104bd11d797f351eb723d4c3018ab1c77e205177ecac01d63d: Status 404 returned error can't find the container with id e3fc48db502f84104bd11d797f351eb723d4c3018ab1c77e205177ecac01d63d Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.463714 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" event={"ID":"8ef8cb79-5fb6-4438-bc49-4a88a304557d","Type":"ContainerStarted","Data":"02838a321cd343b950a53ab2667ac57a216bf6782305add1202a50cb5929d82f"} Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.463759 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" event={"ID":"8ef8cb79-5fb6-4438-bc49-4a88a304557d","Type":"ContainerStarted","Data":"965bffc30996dc3172e2283093ba12c966cf6dffb2c4e5c241592f0c38013fe0"} Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.491904 4622 generic.go:334] "Generic (PLEG): container finished" podID="a6824d35-eddb-4fb5-821d-0a82184fbe45" containerID="02bdb6f3f6a05f7a21090ef0cbc2ea4f9e0b1ef5787b6ed7779ffa0f4d12c16b" exitCode=0 Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.491992 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5bd7z" event={"ID":"a6824d35-eddb-4fb5-821d-0a82184fbe45","Type":"ContainerDied","Data":"02bdb6f3f6a05f7a21090ef0cbc2ea4f9e0b1ef5787b6ed7779ffa0f4d12c16b"} Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.492042 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5bd7z" event={"ID":"a6824d35-eddb-4fb5-821d-0a82184fbe45","Type":"ContainerStarted","Data":"47f41e615c9343ff142551232b62db8214248bc4bae712d386fa8100b18e4786"} Nov 26 11:13:03 crc 
kubenswrapper[4622]: I1126 11:13:03.494641 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" event={"ID":"3db268a7-30fd-4031-8eba-72d60056bbbd","Type":"ContainerDied","Data":"29f854cf4a79e1554349fbd2049c45246cbbf0e1e92a7178c7233ed0237edd30"} Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.494492 4622 generic.go:334] "Generic (PLEG): container finished" podID="3db268a7-30fd-4031-8eba-72d60056bbbd" containerID="29f854cf4a79e1554349fbd2049c45246cbbf0e1e92a7178c7233ed0237edd30" exitCode=0 Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.511610 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-kkvq6" podStartSLOduration=9.511485963 podStartE2EDuration="9.511485963s" podCreationTimestamp="2025-11-26 11:12:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:03.506874933 +0000 UTC m=+143.098086456" watchObservedRunningTime="2025-11-26 11:13:03.511485963 +0000 UTC m=+143.102697484" Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.517891 4622 generic.go:334] "Generic (PLEG): container finished" podID="9b268297-0c05-4b22-acfd-a2dc214d44d3" containerID="1ee9d59b4db930a2b860f8b26e02d3ecdb61eb527d148985215e9b26d3c099f2" exitCode=0 Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.518579 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcvpx" event={"ID":"9b268297-0c05-4b22-acfd-a2dc214d44d3","Type":"ContainerDied","Data":"1ee9d59b4db930a2b860f8b26e02d3ecdb61eb527d148985215e9b26d3c099f2"} Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.518612 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcvpx" event={"ID":"9b268297-0c05-4b22-acfd-a2dc214d44d3","Type":"ContainerStarted","Data":"61ca90ac0079ea6875e01d5e9dae52a2f1bdd3660c2baca11ea1a752bd47c757"} Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.519819 4622 patch_prober.go:28] interesting pod/downloads-7954f5f757-5lz6m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.519850 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5lz6m" podUID="63b1178e-6e77-4714-96eb-7dc978789aaa" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.593120 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qqt6v"] Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.653666 4622 patch_prober.go:28] interesting pod/router-default-5444994796-w7zs4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 26 11:13:03 crc kubenswrapper[4622]: [-]has-synced failed: reason withheld Nov 26 11:13:03 crc kubenswrapper[4622]: [+]process-running ok Nov 26 11:13:03 crc kubenswrapper[4622]: healthz check failed Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.658194 4622 prober.go:107] "Probe failed" 
probeType="Startup" pod="openshift-ingress/router-default-5444994796-w7zs4" podUID="1e710e37-8d2a-43f6-9e8f-7eb3233eb276" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.696967 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ts6wt"] Nov 26 11:13:03 crc kubenswrapper[4622]: W1126 11:13:03.776205 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0edf3634_60b1_4d74_a80b_9e49fb4f5e1f.slice/crio-88ed69733157e22ac77d885af0f383c140894c503203656fd3770cf8c833b7ab WatchSource:0}: Error finding container 88ed69733157e22ac77d885af0f383c140894c503203656fd3770cf8c833b7ab: Status 404 returned error can't find the container with id 88ed69733157e22ac77d885af0f383c140894c503203656fd3770cf8c833b7ab Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.985677 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.986637 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.992102 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Nov 26 11:13:03 crc kubenswrapper[4622]: I1126 11:13:03.992566 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:03.996660 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.055635 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b93c2d6c-1d6a-4784-84c4-0255edb84061-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b93c2d6c-1d6a-4784-84c4-0255edb84061\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.055709 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b93c2d6c-1d6a-4784-84c4-0255edb84061-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b93c2d6c-1d6a-4784-84c4-0255edb84061\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.157206 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b93c2d6c-1d6a-4784-84c4-0255edb84061-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b93c2d6c-1d6a-4784-84c4-0255edb84061\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.157280 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b93c2d6c-1d6a-4784-84c4-0255edb84061-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b93c2d6c-1d6a-4784-84c4-0255edb84061\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 
11:13:04.157317 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b93c2d6c-1d6a-4784-84c4-0255edb84061-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b93c2d6c-1d6a-4784-84c4-0255edb84061\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.174029 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b93c2d6c-1d6a-4784-84c4-0255edb84061-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b93c2d6c-1d6a-4784-84c4-0255edb84061\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.307870 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.527846 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" event={"ID":"5012bed8-6f9e-47b8-9f71-5eff34c9d997","Type":"ContainerStarted","Data":"c94c4f8ecf4df3241b4abd7b2a2f93ee9b59d2ad6616118594a895b9cbdaf2bb"} Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.527914 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" event={"ID":"5012bed8-6f9e-47b8-9f71-5eff34c9d997","Type":"ContainerStarted","Data":"33278b3b86ba917e3c05ccced1e5717433afa49ffb4af9f4b045945353760f6b"} Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.527961 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.531289 4622 generic.go:334] "Generic (PLEG): container finished" podID="0edf3634-60b1-4d74-a80b-9e49fb4f5e1f" containerID="b98ed0200792d9cd962686139f86ab54e5419e44a77487b675aac80a4b05760b" exitCode=0 Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.531361 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ts6wt" event={"ID":"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f","Type":"ContainerDied","Data":"b98ed0200792d9cd962686139f86ab54e5419e44a77487b675aac80a4b05760b"} Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.531418 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ts6wt" event={"ID":"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f","Type":"ContainerStarted","Data":"88ed69733157e22ac77d885af0f383c140894c503203656fd3770cf8c833b7ab"} Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.535574 4622 generic.go:334] "Generic (PLEG): container finished" podID="31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6" containerID="b297f353335547ad702b9e185558f4a6ddba62c7c3360308c9a4e07ac725bd99" exitCode=0 Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.536174 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wnxq7" event={"ID":"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6","Type":"ContainerDied","Data":"b297f353335547ad702b9e185558f4a6ddba62c7c3360308c9a4e07ac725bd99"} Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.537403 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wnxq7" 
event={"ID":"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6","Type":"ContainerStarted","Data":"e3fc48db502f84104bd11d797f351eb723d4c3018ab1c77e205177ecac01d63d"} Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.551033 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" podStartSLOduration=126.551007526 podStartE2EDuration="2m6.551007526s" podCreationTimestamp="2025-11-26 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:04.548550997 +0000 UTC m=+144.139762539" watchObservedRunningTime="2025-11-26 11:13:04.551007526 +0000 UTC m=+144.142219048" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.633610 4622 patch_prober.go:28] interesting pod/router-default-5444994796-w7zs4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 26 11:13:04 crc kubenswrapper[4622]: [-]has-synced failed: reason withheld Nov 26 11:13:04 crc kubenswrapper[4622]: [+]process-running ok Nov 26 11:13:04 crc kubenswrapper[4622]: healthz check failed Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.633667 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-w7zs4" podUID="1e710e37-8d2a-43f6-9e8f-7eb3233eb276" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.728569 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.825782 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.870693 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3db268a7-30fd-4031-8eba-72d60056bbbd-secret-volume\") pod \"3db268a7-30fd-4031-8eba-72d60056bbbd\" (UID: \"3db268a7-30fd-4031-8eba-72d60056bbbd\") " Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.871125 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3db268a7-30fd-4031-8eba-72d60056bbbd-config-volume\") pod \"3db268a7-30fd-4031-8eba-72d60056bbbd\" (UID: \"3db268a7-30fd-4031-8eba-72d60056bbbd\") " Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.871278 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4z4vz\" (UniqueName: \"kubernetes.io/projected/3db268a7-30fd-4031-8eba-72d60056bbbd-kube-api-access-4z4vz\") pod \"3db268a7-30fd-4031-8eba-72d60056bbbd\" (UID: \"3db268a7-30fd-4031-8eba-72d60056bbbd\") " Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.872140 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3db268a7-30fd-4031-8eba-72d60056bbbd-config-volume" (OuterVolumeSpecName: "config-volume") pod "3db268a7-30fd-4031-8eba-72d60056bbbd" (UID: "3db268a7-30fd-4031-8eba-72d60056bbbd"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.893673 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.895521 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3db268a7-30fd-4031-8eba-72d60056bbbd-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3db268a7-30fd-4031-8eba-72d60056bbbd" (UID: "3db268a7-30fd-4031-8eba-72d60056bbbd"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.901925 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3db268a7-30fd-4031-8eba-72d60056bbbd-kube-api-access-4z4vz" (OuterVolumeSpecName: "kube-api-access-4z4vz") pod "3db268a7-30fd-4031-8eba-72d60056bbbd" (UID: "3db268a7-30fd-4031-8eba-72d60056bbbd"). InnerVolumeSpecName "kube-api-access-4z4vz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.973033 4622 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3db268a7-30fd-4031-8eba-72d60056bbbd-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.973065 4622 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3db268a7-30fd-4031-8eba-72d60056bbbd-config-volume\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:04 crc kubenswrapper[4622]: I1126 11:13:04.973074 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4z4vz\" (UniqueName: \"kubernetes.io/projected/3db268a7-30fd-4031-8eba-72d60056bbbd-kube-api-access-4z4vz\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:05 crc kubenswrapper[4622]: I1126 11:13:05.556483 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" event={"ID":"3db268a7-30fd-4031-8eba-72d60056bbbd","Type":"ContainerDied","Data":"e03af246d2f4d088032eb00abddaccfe8e1949e121f137c2fb4377e9542a8c2a"} Nov 26 11:13:05 crc kubenswrapper[4622]: I1126 11:13:05.556776 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e03af246d2f4d088032eb00abddaccfe8e1949e121f137c2fb4377e9542a8c2a" Nov 26 11:13:05 crc kubenswrapper[4622]: I1126 11:13:05.556534 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz" Nov 26 11:13:05 crc kubenswrapper[4622]: I1126 11:13:05.566642 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b93c2d6c-1d6a-4784-84c4-0255edb84061","Type":"ContainerStarted","Data":"d0979e0b96e67f1474188667406139e4235ed183c7b831c3ea2b666f16ef4c77"} Nov 26 11:13:05 crc kubenswrapper[4622]: I1126 11:13:05.580456 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.580435948 podStartE2EDuration="2.580435948s" podCreationTimestamp="2025-11-26 11:13:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:05.579287092 +0000 UTC m=+145.170498625" watchObservedRunningTime="2025-11-26 11:13:05.580435948 +0000 UTC m=+145.171647470" Nov 26 11:13:05 crc kubenswrapper[4622]: I1126 11:13:05.635860 4622 patch_prober.go:28] interesting pod/router-default-5444994796-w7zs4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 26 11:13:05 crc kubenswrapper[4622]: [-]has-synced failed: reason withheld Nov 26 11:13:05 crc kubenswrapper[4622]: [+]process-running ok Nov 26 11:13:05 crc kubenswrapper[4622]: healthz check failed Nov 26 11:13:05 crc kubenswrapper[4622]: I1126 11:13:05.635922 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-w7zs4" podUID="1e710e37-8d2a-43f6-9e8f-7eb3233eb276" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 26 11:13:06 crc kubenswrapper[4622]: I1126 11:13:06.591289 4622 generic.go:334] "Generic (PLEG): container finished" podID="b93c2d6c-1d6a-4784-84c4-0255edb84061" containerID="e5c344f202061c34c910fc2493140b2f013af530ee6852d5f7524315253e5825" exitCode=0 Nov 26 11:13:06 crc kubenswrapper[4622]: I1126 11:13:06.591521 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b93c2d6c-1d6a-4784-84c4-0255edb84061","Type":"ContainerDied","Data":"e5c344f202061c34c910fc2493140b2f013af530ee6852d5f7524315253e5825"} Nov 26 11:13:06 crc kubenswrapper[4622]: I1126 11:13:06.609798 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:13:06 crc kubenswrapper[4622]: I1126 11:13:06.609868 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:13:06 crc kubenswrapper[4622]: I1126 11:13:06.609936 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: 
\"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:13:06 crc kubenswrapper[4622]: I1126 11:13:06.609958 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:13:06 crc kubenswrapper[4622]: I1126 11:13:06.610843 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:13:06 crc kubenswrapper[4622]: I1126 11:13:06.618890 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:13:06 crc kubenswrapper[4622]: I1126 11:13:06.620834 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:13:06 crc kubenswrapper[4622]: I1126 11:13:06.629086 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:13:06 crc kubenswrapper[4622]: I1126 11:13:06.633067 4622 patch_prober.go:28] interesting pod/router-default-5444994796-w7zs4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 26 11:13:06 crc kubenswrapper[4622]: [-]has-synced failed: reason withheld Nov 26 11:13:06 crc kubenswrapper[4622]: [+]process-running ok Nov 26 11:13:06 crc kubenswrapper[4622]: healthz check failed Nov 26 11:13:06 crc kubenswrapper[4622]: I1126 11:13:06.633131 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-w7zs4" podUID="1e710e37-8d2a-43f6-9e8f-7eb3233eb276" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 26 11:13:06 crc kubenswrapper[4622]: I1126 11:13:06.816877 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:13:06 crc kubenswrapper[4622]: I1126 11:13:06.822907 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 26 11:13:06 crc kubenswrapper[4622]: I1126 11:13:06.835969 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.401479 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.401668 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.403413 4622 patch_prober.go:28] interesting pod/console-f9d7485db-r2tzg container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.6:8443/health\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.403478 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-r2tzg" podUID="c58a0d86-56ac-4aaa-b2c9-995c925cd839" containerName="console" probeResult="failure" output="Get \"https://10.217.0.6:8443/health\": dial tcp 10.217.0.6:8443: connect: connection refused" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.422050 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.427768 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-8g8bw" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.546122 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 26 11:13:07 crc kubenswrapper[4622]: E1126 11:13:07.546386 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3db268a7-30fd-4031-8eba-72d60056bbbd" containerName="collect-profiles" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.546401 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="3db268a7-30fd-4031-8eba-72d60056bbbd" containerName="collect-profiles" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.546525 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="3db268a7-30fd-4031-8eba-72d60056bbbd" containerName="collect-profiles" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.553358 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.553461 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.557215 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.557458 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.625224 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/15223db2-233f-4352-849e-783794b765a0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"15223db2-233f-4352-849e-783794b765a0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.625349 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/15223db2-233f-4352-849e-783794b765a0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"15223db2-233f-4352-849e-783794b765a0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.630375 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.632792 4622 patch_prober.go:28] interesting pod/router-default-5444994796-w7zs4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 26 11:13:07 crc kubenswrapper[4622]: [-]has-synced failed: reason withheld Nov 26 11:13:07 crc kubenswrapper[4622]: [+]process-running ok Nov 26 11:13:07 crc kubenswrapper[4622]: healthz check failed Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.632839 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-w7zs4" podUID="1e710e37-8d2a-43f6-9e8f-7eb3233eb276" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.727140 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/15223db2-233f-4352-849e-783794b765a0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"15223db2-233f-4352-849e-783794b765a0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.727742 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/15223db2-233f-4352-849e-783794b765a0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"15223db2-233f-4352-849e-783794b765a0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.727818 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/15223db2-233f-4352-849e-783794b765a0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"15223db2-233f-4352-849e-783794b765a0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.762864 4622 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/15223db2-233f-4352-849e-783794b765a0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"15223db2-233f-4352-849e-783794b765a0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.884633 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 26 11:13:07 crc kubenswrapper[4622]: I1126 11:13:07.955788 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-5lz6m" Nov 26 11:13:08 crc kubenswrapper[4622]: I1126 11:13:08.633132 4622 patch_prober.go:28] interesting pod/router-default-5444994796-w7zs4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 26 11:13:08 crc kubenswrapper[4622]: [+]has-synced ok Nov 26 11:13:08 crc kubenswrapper[4622]: [+]process-running ok Nov 26 11:13:08 crc kubenswrapper[4622]: healthz check failed Nov 26 11:13:08 crc kubenswrapper[4622]: I1126 11:13:08.633369 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-w7zs4" podUID="1e710e37-8d2a-43f6-9e8f-7eb3233eb276" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 26 11:13:09 crc kubenswrapper[4622]: I1126 11:13:09.638483 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:13:09 crc kubenswrapper[4622]: I1126 11:13:09.643987 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-w7zs4" Nov 26 11:13:10 crc kubenswrapper[4622]: I1126 11:13:10.025900 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-9vzz6" Nov 26 11:13:10 crc kubenswrapper[4622]: I1126 11:13:10.411488 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 26 11:13:10 crc kubenswrapper[4622]: I1126 11:13:10.585697 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b93c2d6c-1d6a-4784-84c4-0255edb84061-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "b93c2d6c-1d6a-4784-84c4-0255edb84061" (UID: "b93c2d6c-1d6a-4784-84c4-0255edb84061"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:13:10 crc kubenswrapper[4622]: I1126 11:13:10.586125 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b93c2d6c-1d6a-4784-84c4-0255edb84061-kubelet-dir\") pod \"b93c2d6c-1d6a-4784-84c4-0255edb84061\" (UID: \"b93c2d6c-1d6a-4784-84c4-0255edb84061\") " Nov 26 11:13:10 crc kubenswrapper[4622]: I1126 11:13:10.586384 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b93c2d6c-1d6a-4784-84c4-0255edb84061-kube-api-access\") pod \"b93c2d6c-1d6a-4784-84c4-0255edb84061\" (UID: \"b93c2d6c-1d6a-4784-84c4-0255edb84061\") " Nov 26 11:13:10 crc kubenswrapper[4622]: I1126 11:13:10.587773 4622 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b93c2d6c-1d6a-4784-84c4-0255edb84061-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:10 crc kubenswrapper[4622]: I1126 11:13:10.589978 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b93c2d6c-1d6a-4784-84c4-0255edb84061-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "b93c2d6c-1d6a-4784-84c4-0255edb84061" (UID: "b93c2d6c-1d6a-4784-84c4-0255edb84061"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:13:10 crc kubenswrapper[4622]: I1126 11:13:10.679829 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b93c2d6c-1d6a-4784-84c4-0255edb84061","Type":"ContainerDied","Data":"d0979e0b96e67f1474188667406139e4235ed183c7b831c3ea2b666f16ef4c77"} Nov 26 11:13:10 crc kubenswrapper[4622]: I1126 11:13:10.679873 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 26 11:13:10 crc kubenswrapper[4622]: I1126 11:13:10.679881 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0979e0b96e67f1474188667406139e4235ed183c7b831c3ea2b666f16ef4c77" Nov 26 11:13:10 crc kubenswrapper[4622]: I1126 11:13:10.689555 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b93c2d6c-1d6a-4784-84c4-0255edb84061-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:11 crc kubenswrapper[4622]: I1126 11:13:11.339402 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 26 11:13:11 crc kubenswrapper[4622]: W1126 11:13:11.348655 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod15223db2_233f_4352_849e_783794b765a0.slice/crio-b5b5cc9e16d82a4c45b503940edbf242360efd436c18725fe4759a4a5b5a911d WatchSource:0}: Error finding container b5b5cc9e16d82a4c45b503940edbf242360efd436c18725fe4759a4a5b5a911d: Status 404 returned error can't find the container with id b5b5cc9e16d82a4c45b503940edbf242360efd436c18725fe4759a4a5b5a911d Nov 26 11:13:11 crc kubenswrapper[4622]: W1126 11:13:11.418515 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-22d54a6ece5ae897d5ac4c425776c3f9e00c1ef32572fc61c21431670a001362 WatchSource:0}: Error finding container 22d54a6ece5ae897d5ac4c425776c3f9e00c1ef32572fc61c21431670a001362: Status 404 returned error can't find the container with id 22d54a6ece5ae897d5ac4c425776c3f9e00c1ef32572fc61c21431670a001362 Nov 26 11:13:11 crc kubenswrapper[4622]: W1126 11:13:11.468933 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-926cc85fdb26c775a49fa7033ae3309c37571953b4c31f7ba84af4ec39804cfd WatchSource:0}: Error finding container 926cc85fdb26c775a49fa7033ae3309c37571953b4c31f7ba84af4ec39804cfd: Status 404 returned error can't find the container with id 926cc85fdb26c775a49fa7033ae3309c37571953b4c31f7ba84af4ec39804cfd Nov 26 11:13:11 crc kubenswrapper[4622]: I1126 11:13:11.694352 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"9e81b298c0e9eaa9f4ec7c69406e2bdfbd1203fbac3778201cca8ad628babe05"} Nov 26 11:13:11 crc kubenswrapper[4622]: I1126 11:13:11.694720 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"22d54a6ece5ae897d5ac4c425776c3f9e00c1ef32572fc61c21431670a001362"} Nov 26 11:13:11 crc kubenswrapper[4622]: I1126 11:13:11.696849 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"15223db2-233f-4352-849e-783794b765a0","Type":"ContainerStarted","Data":"c2efbbd179b851b5cb1451e62fa32c13a0c207e9320e523d3ae30e03b28f47d7"} Nov 26 11:13:11 crc kubenswrapper[4622]: I1126 11:13:11.696894 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" 
event={"ID":"15223db2-233f-4352-849e-783794b765a0","Type":"ContainerStarted","Data":"b5b5cc9e16d82a4c45b503940edbf242360efd436c18725fe4759a4a5b5a911d"} Nov 26 11:13:11 crc kubenswrapper[4622]: I1126 11:13:11.701977 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"aaab0653218813c7544c1e311e262307beac5ff903a08e3ca375bc8300ba6c18"} Nov 26 11:13:11 crc kubenswrapper[4622]: I1126 11:13:11.702006 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"926cc85fdb26c775a49fa7033ae3309c37571953b4c31f7ba84af4ec39804cfd"} Nov 26 11:13:11 crc kubenswrapper[4622]: I1126 11:13:11.702319 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:13:11 crc kubenswrapper[4622]: I1126 11:13:11.703851 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"bc384ee8822d656f4d2e95b1e73e34db8d7dfe4e6a611ea8e538f778ff61eca3"} Nov 26 11:13:11 crc kubenswrapper[4622]: I1126 11:13:11.703876 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"853bea90b5823879f50291f4f462a704167cc096a5dd406b6f0a6e98b51f930a"} Nov 26 11:13:11 crc kubenswrapper[4622]: I1126 11:13:11.735377 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=4.73535546 podStartE2EDuration="4.73535546s" podCreationTimestamp="2025-11-26 11:13:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:11.720715882 +0000 UTC m=+151.311927405" watchObservedRunningTime="2025-11-26 11:13:11.73535546 +0000 UTC m=+151.326566982" Nov 26 11:13:12 crc kubenswrapper[4622]: I1126 11:13:12.721722 4622 generic.go:334] "Generic (PLEG): container finished" podID="15223db2-233f-4352-849e-783794b765a0" containerID="c2efbbd179b851b5cb1451e62fa32c13a0c207e9320e523d3ae30e03b28f47d7" exitCode=0 Nov 26 11:13:12 crc kubenswrapper[4622]: I1126 11:13:12.721835 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"15223db2-233f-4352-849e-783794b765a0","Type":"ContainerDied","Data":"c2efbbd179b851b5cb1451e62fa32c13a0c207e9320e523d3ae30e03b28f47d7"} Nov 26 11:13:15 crc kubenswrapper[4622]: I1126 11:13:15.198930 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:13:15 crc kubenswrapper[4622]: I1126 11:13:15.199203 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Nov 26 11:13:17 crc kubenswrapper[4622]: I1126 11:13:17.216466 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 26 11:13:17 crc kubenswrapper[4622]: I1126 11:13:17.296833 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/15223db2-233f-4352-849e-783794b765a0-kube-api-access\") pod \"15223db2-233f-4352-849e-783794b765a0\" (UID: \"15223db2-233f-4352-849e-783794b765a0\") " Nov 26 11:13:17 crc kubenswrapper[4622]: I1126 11:13:17.296938 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/15223db2-233f-4352-849e-783794b765a0-kubelet-dir\") pod \"15223db2-233f-4352-849e-783794b765a0\" (UID: \"15223db2-233f-4352-849e-783794b765a0\") " Nov 26 11:13:17 crc kubenswrapper[4622]: I1126 11:13:17.297068 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/15223db2-233f-4352-849e-783794b765a0-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "15223db2-233f-4352-849e-783794b765a0" (UID: "15223db2-233f-4352-849e-783794b765a0"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:13:17 crc kubenswrapper[4622]: I1126 11:13:17.297458 4622 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/15223db2-233f-4352-849e-783794b765a0-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:17 crc kubenswrapper[4622]: I1126 11:13:17.303195 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15223db2-233f-4352-849e-783794b765a0-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "15223db2-233f-4352-849e-783794b765a0" (UID: "15223db2-233f-4352-849e-783794b765a0"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:13:17 crc kubenswrapper[4622]: I1126 11:13:17.399145 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/15223db2-233f-4352-849e-783794b765a0-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:17 crc kubenswrapper[4622]: I1126 11:13:17.406813 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:13:17 crc kubenswrapper[4622]: I1126 11:13:17.411970 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:13:17 crc kubenswrapper[4622]: I1126 11:13:17.760193 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 26 11:13:17 crc kubenswrapper[4622]: I1126 11:13:17.760394 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"15223db2-233f-4352-849e-783794b765a0","Type":"ContainerDied","Data":"b5b5cc9e16d82a4c45b503940edbf242360efd436c18725fe4759a4a5b5a911d"} Nov 26 11:13:17 crc kubenswrapper[4622]: I1126 11:13:17.760443 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5b5cc9e16d82a4c45b503940edbf242360efd436c18725fe4759a4a5b5a911d" Nov 26 11:13:20 crc kubenswrapper[4622]: I1126 11:13:20.243091 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs\") pod \"network-metrics-daemon-z78ph\" (UID: \"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\") " pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:13:20 crc kubenswrapper[4622]: I1126 11:13:20.250348 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f23a93f9-15cf-4dfd-802d-4b6bd04bbf81-metrics-certs\") pod \"network-metrics-daemon-z78ph\" (UID: \"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81\") " pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:13:20 crc kubenswrapper[4622]: I1126 11:13:20.330752 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-z78ph" Nov 26 11:13:22 crc kubenswrapper[4622]: I1126 11:13:22.784850 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7zqqd" event={"ID":"af122252-a014-4afb-b751-bd4e53793b53","Type":"ContainerStarted","Data":"eda7884ed1f4000f610fa581413dcf12fba691db762751315b5b8c3feea424a7"} Nov 26 11:13:22 crc kubenswrapper[4622]: I1126 11:13:22.793117 4622 generic.go:334] "Generic (PLEG): container finished" podID="a6824d35-eddb-4fb5-821d-0a82184fbe45" containerID="91c7fd08aa43f0dc9ee814da58e4dd2b3d51df3930777a466580467dd0a8ecd5" exitCode=0 Nov 26 11:13:22 crc kubenswrapper[4622]: I1126 11:13:22.793199 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5bd7z" event={"ID":"a6824d35-eddb-4fb5-821d-0a82184fbe45","Type":"ContainerDied","Data":"91c7fd08aa43f0dc9ee814da58e4dd2b3d51df3930777a466580467dd0a8ecd5"} Nov 26 11:13:22 crc kubenswrapper[4622]: I1126 11:13:22.793493 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-z78ph"] Nov 26 11:13:22 crc kubenswrapper[4622]: I1126 11:13:22.795930 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l97bl" event={"ID":"b71a6300-1cfd-4eb0-b75c-7231184cfe79","Type":"ContainerStarted","Data":"baf014913e4f40b58a55614cbb91d7afecba07bf77a923eb8d450f8e81c8a44c"} Nov 26 11:13:22 crc kubenswrapper[4622]: I1126 11:13:22.798068 4622 generic.go:334] "Generic (PLEG): container finished" podID="9b268297-0c05-4b22-acfd-a2dc214d44d3" containerID="1d98bb7ba682ec019bdc55764970f2f0ea6636bbf6f8e4df1500e8a03dfdbf4b" exitCode=0 Nov 26 11:13:22 crc kubenswrapper[4622]: I1126 11:13:22.798121 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcvpx" event={"ID":"9b268297-0c05-4b22-acfd-a2dc214d44d3","Type":"ContainerDied","Data":"1d98bb7ba682ec019bdc55764970f2f0ea6636bbf6f8e4df1500e8a03dfdbf4b"} 
Nov 26 11:13:22 crc kubenswrapper[4622]: I1126 11:13:22.802887 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6j9ln" event={"ID":"7dc1d52e-ff99-4495-8eef-bc54bf430361","Type":"ContainerStarted","Data":"5ff0014874fa51e47179acd96b71bc8c6aa505d323617d036bb86bc91ce7b3ae"}
Nov 26 11:13:22 crc kubenswrapper[4622]: I1126 11:13:22.814203 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wnxq7" event={"ID":"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6","Type":"ContainerStarted","Data":"60e3c39f3007d63a87d8e42c82906969ebe7a5d605fb6f0e2116ab1b6475423c"}
Nov 26 11:13:22 crc kubenswrapper[4622]: I1126 11:13:22.816454 4622 generic.go:334] "Generic (PLEG): container finished" podID="acccae42-8133-475f-ad53-dbfa434e5e45" containerID="83e57793cff3d432ce586087757e30b9d9501ffda0a67c5ecde54b9a6599a71a" exitCode=0
Nov 26 11:13:22 crc kubenswrapper[4622]: I1126 11:13:22.816532 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d4zmf" event={"ID":"acccae42-8133-475f-ad53-dbfa434e5e45","Type":"ContainerDied","Data":"83e57793cff3d432ce586087757e30b9d9501ffda0a67c5ecde54b9a6599a71a"}
Nov 26 11:13:22 crc kubenswrapper[4622]: I1126 11:13:22.821033 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ts6wt" event={"ID":"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f","Type":"ContainerStarted","Data":"65b2bdbb77fc22041cef14d0e5008166f85fbb9520a0cf4bb840404ba0e65f39"}
Nov 26 11:13:22 crc kubenswrapper[4622]: W1126 11:13:22.980629 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf23a93f9_15cf_4dfd_802d_4b6bd04bbf81.slice/crio-fb9c88accf2332f8bc9859f9a84e060bb4c857f40f0241c019ca8ef907466914 WatchSource:0}: Error finding container fb9c88accf2332f8bc9859f9a84e060bb4c857f40f0241c019ca8ef907466914: Status 404 returned error can't find the container with id fb9c88accf2332f8bc9859f9a84e060bb4c857f40f0241c019ca8ef907466914
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.195224 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v"
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.828277 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d4zmf" event={"ID":"acccae42-8133-475f-ad53-dbfa434e5e45","Type":"ContainerStarted","Data":"9689bbc1d9d225e5b3c21af0c6e5d85aa44f1b93604ce3eb920df99c61f74b0d"}
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.830203 4622 generic.go:334] "Generic (PLEG): container finished" podID="0edf3634-60b1-4d74-a80b-9e49fb4f5e1f" containerID="65b2bdbb77fc22041cef14d0e5008166f85fbb9520a0cf4bb840404ba0e65f39" exitCode=0
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.830319 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ts6wt" event={"ID":"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f","Type":"ContainerDied","Data":"65b2bdbb77fc22041cef14d0e5008166f85fbb9520a0cf4bb840404ba0e65f39"}
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.832716 4622 generic.go:334] "Generic (PLEG): container finished" podID="af122252-a014-4afb-b751-bd4e53793b53" containerID="eda7884ed1f4000f610fa581413dcf12fba691db762751315b5b8c3feea424a7" exitCode=0
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.832772 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7zqqd" event={"ID":"af122252-a014-4afb-b751-bd4e53793b53","Type":"ContainerDied","Data":"eda7884ed1f4000f610fa581413dcf12fba691db762751315b5b8c3feea424a7"}
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.836714 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcvpx" event={"ID":"9b268297-0c05-4b22-acfd-a2dc214d44d3","Type":"ContainerStarted","Data":"1439e4b25a0c96c55ca913397ed6f986d2e7464f005998abcdc240b0cc352046"}
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.839587 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5bd7z" event={"ID":"a6824d35-eddb-4fb5-821d-0a82184fbe45","Type":"ContainerStarted","Data":"e3f5157c1c1157088bb0a65fc04f870cf23b426711762aa13245686fb48564fd"}
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.841251 4622 generic.go:334] "Generic (PLEG): container finished" podID="b71a6300-1cfd-4eb0-b75c-7231184cfe79" containerID="baf014913e4f40b58a55614cbb91d7afecba07bf77a923eb8d450f8e81c8a44c" exitCode=0
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.841335 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l97bl" event={"ID":"b71a6300-1cfd-4eb0-b75c-7231184cfe79","Type":"ContainerDied","Data":"baf014913e4f40b58a55614cbb91d7afecba07bf77a923eb8d450f8e81c8a44c"}
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.843027 4622 generic.go:334] "Generic (PLEG): container finished" podID="7dc1d52e-ff99-4495-8eef-bc54bf430361" containerID="5ff0014874fa51e47179acd96b71bc8c6aa505d323617d036bb86bc91ce7b3ae" exitCode=0
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.843056 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6j9ln" event={"ID":"7dc1d52e-ff99-4495-8eef-bc54bf430361","Type":"ContainerDied","Data":"5ff0014874fa51e47179acd96b71bc8c6aa505d323617d036bb86bc91ce7b3ae"}
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.844816 4622 generic.go:334] "Generic (PLEG): container finished" podID="31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6" containerID="60e3c39f3007d63a87d8e42c82906969ebe7a5d605fb6f0e2116ab1b6475423c" exitCode=0
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.844895 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wnxq7" event={"ID":"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6","Type":"ContainerDied","Data":"60e3c39f3007d63a87d8e42c82906969ebe7a5d605fb6f0e2116ab1b6475423c"}
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.846614 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-z78ph" event={"ID":"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81","Type":"ContainerStarted","Data":"70d82dcf2258112eadc9e8277e91372e3a79e4b35d3ec6cbf86662e1c5e0659e"}
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.846647 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-z78ph" event={"ID":"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81","Type":"ContainerStarted","Data":"2c8b65e464789b7fcee198636e66f8e47a7ac0090bf765b57b7bef24b9d9de5e"}
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.846657 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-z78ph" event={"ID":"f23a93f9-15cf-4dfd-802d-4b6bd04bbf81","Type":"ContainerStarted","Data":"fb9c88accf2332f8bc9859f9a84e060bb4c857f40f0241c019ca8ef907466914"}
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.853877 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-d4zmf" podStartSLOduration=3.9381406009999997 podStartE2EDuration="24.853849635s" podCreationTimestamp="2025-11-26 11:12:59 +0000 UTC" firstStartedPulling="2025-11-26 11:13:02.419650244 +0000 UTC m=+142.010861766" lastFinishedPulling="2025-11-26 11:13:23.335359277 +0000 UTC m=+162.926570800" observedRunningTime="2025-11-26 11:13:23.850211599 +0000 UTC m=+163.441423121" watchObservedRunningTime="2025-11-26 11:13:23.853849635 +0000 UTC m=+163.445061156"
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.880379 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-z78ph" podStartSLOduration=144.880360886 podStartE2EDuration="2m24.880360886s" podCreationTimestamp="2025-11-26 11:10:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:23.874947764 +0000 UTC m=+163.466159287" watchObservedRunningTime="2025-11-26 11:13:23.880360886 +0000 UTC m=+163.471572408"
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.937204 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xcvpx" podStartSLOduration=3.045311559 podStartE2EDuration="22.937179921s" podCreationTimestamp="2025-11-26 11:13:01 +0000 UTC" firstStartedPulling="2025-11-26 11:13:03.523480596 +0000 UTC m=+143.114692119" lastFinishedPulling="2025-11-26 11:13:23.415348959 +0000 UTC m=+163.006560481" observedRunningTime="2025-11-26 11:13:23.936792221 +0000 UTC m=+163.528003743" watchObservedRunningTime="2025-11-26 11:13:23.937179921 +0000 UTC m=+163.528391443"
Nov 26 11:13:23 crc kubenswrapper[4622]: I1126 11:13:23.998758 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5bd7z" podStartSLOduration=3.175446921 podStartE2EDuration="22.998740162s" podCreationTimestamp="2025-11-26 11:13:01 +0000 UTC" firstStartedPulling="2025-11-26 11:13:03.493377037 +0000 UTC m=+143.084588559" lastFinishedPulling="2025-11-26 11:13:23.316670278 +0000 UTC m=+162.907881800" observedRunningTime="2025-11-26 11:13:23.99712055 +0000 UTC m=+163.588332071" watchObservedRunningTime="2025-11-26 11:13:23.998740162 +0000 UTC m=+163.589951683"
Nov 26 11:13:24 crc kubenswrapper[4622]: I1126 11:13:24.855167 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ts6wt" event={"ID":"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f","Type":"ContainerStarted","Data":"5cfc96e5ebf7c2e22343641870a896a80e7aed3652f2318e35ed6f02ae8a273a"}
Nov 26 11:13:24 crc kubenswrapper[4622]: I1126 11:13:24.857430 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l97bl" event={"ID":"b71a6300-1cfd-4eb0-b75c-7231184cfe79","Type":"ContainerStarted","Data":"992ab2b07f34c38206374fdd49c1015600a4a7f3bf2e603321ba3a3abc3625c0"}
Nov 26 11:13:24 crc kubenswrapper[4622]: I1126 11:13:24.859413 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6j9ln" event={"ID":"7dc1d52e-ff99-4495-8eef-bc54bf430361","Type":"ContainerStarted","Data":"a0d49df7f94f6942cfa4f75c1248b6b360827c345602d4718a065f5345abea11"}
Nov 26 11:13:24 crc kubenswrapper[4622]: I1126 11:13:24.862145 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wnxq7" event={"ID":"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6","Type":"ContainerStarted","Data":"89558b88f96ba4cc93a4c564da5d2e39101444e9b65d2acfc51418ffe59cafa8"}
Nov 26 11:13:24 crc kubenswrapper[4622]: I1126 11:13:24.896967 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6j9ln" podStartSLOduration=3.851594105 podStartE2EDuration="25.896944712s" podCreationTimestamp="2025-11-26 11:12:59 +0000 UTC" firstStartedPulling="2025-11-26 11:13:02.306672477 +0000 UTC m=+141.897883999" lastFinishedPulling="2025-11-26 11:13:24.352023085 +0000 UTC m=+163.943234606" observedRunningTime="2025-11-26 11:13:24.895434335 +0000 UTC m=+164.486645848" watchObservedRunningTime="2025-11-26 11:13:24.896944712 +0000 UTC m=+164.488156233"
Nov 26 11:13:24 crc kubenswrapper[4622]: I1126 11:13:24.898896 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ts6wt" podStartSLOduration=2.988724315 podStartE2EDuration="22.898890168s" podCreationTimestamp="2025-11-26 11:13:02 +0000 UTC" firstStartedPulling="2025-11-26 11:13:04.535071315 +0000 UTC m=+144.126282836" lastFinishedPulling="2025-11-26 11:13:24.445237167 +0000 UTC m=+164.036448689" observedRunningTime="2025-11-26 11:13:24.876790292 +0000 UTC m=+164.468001824" watchObservedRunningTime="2025-11-26 11:13:24.898890168 +0000 UTC m=+164.490101690"
Nov 26 11:13:24 crc kubenswrapper[4622]: I1126 11:13:24.917605 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wnxq7" podStartSLOduration=3.035544958 podStartE2EDuration="22.917587874s" podCreationTimestamp="2025-11-26 11:13:02 +0000 UTC" firstStartedPulling="2025-11-26 11:13:04.542539558 +0000 UTC m=+144.133751080" lastFinishedPulling="2025-11-26 11:13:24.424582474 +0000 UTC m=+164.015793996" observedRunningTime="2025-11-26 11:13:24.915620907 +0000 UTC m=+164.506832439" watchObservedRunningTime="2025-11-26 11:13:24.917587874 +0000 UTC m=+164.508799396"
Nov 26 11:13:24 crc kubenswrapper[4622]: I1126 11:13:24.931068 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l97bl" podStartSLOduration=3.7437259960000002 podStartE2EDuration="25.931046006s" podCreationTimestamp="2025-11-26 11:12:59 +0000 UTC" firstStartedPulling="2025-11-26 11:13:02.272669999 +0000 UTC m=+141.863881521" lastFinishedPulling="2025-11-26 11:13:24.459990009 +0000 UTC m=+164.051201531" observedRunningTime="2025-11-26 11:13:24.927964049 +0000 UTC m=+164.519175570" watchObservedRunningTime="2025-11-26 11:13:24.931046006 +0000 UTC m=+164.522257528"
Nov 26 11:13:25 crc kubenswrapper[4622]: I1126 11:13:25.868674 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7zqqd" event={"ID":"af122252-a014-4afb-b751-bd4e53793b53","Type":"ContainerStarted","Data":"49dbbbf359e8b872457b21f4457724b97ad01e2ae1a1bba93353d4c450e5919c"}
Nov 26 11:13:25 crc kubenswrapper[4622]: I1126 11:13:25.886835 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7zqqd" podStartSLOduration=4.55721521 podStartE2EDuration="26.886814025s" podCreationTimestamp="2025-11-26 11:12:59 +0000 UTC" firstStartedPulling="2025-11-26 11:13:02.437423999 +0000 UTC m=+142.028635521" lastFinishedPulling="2025-11-26 11:13:24.767022814 +0000 UTC m=+164.358234336" observedRunningTime="2025-11-26 11:13:25.883220673 +0000 UTC m=+165.474432195" watchObservedRunningTime="2025-11-26 11:13:25.886814025 +0000 UTC m=+165.478025547"
Nov 26 11:13:29 crc kubenswrapper[4622]: I1126 11:13:29.692894 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-l97bl"
Nov 26 11:13:29 crc kubenswrapper[4622]: I1126 11:13:29.693276 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-l97bl"
Nov 26 11:13:29 crc kubenswrapper[4622]: I1126 11:13:29.778202 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-l97bl"
Nov 26 11:13:29 crc kubenswrapper[4622]: I1126 11:13:29.923442 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-l97bl"
Nov 26 11:13:30 crc kubenswrapper[4622]: I1126 11:13:30.099086 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6j9ln"
Nov 26 11:13:30 crc kubenswrapper[4622]: I1126 11:13:30.099145 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6j9ln"
Nov 26 11:13:30 crc kubenswrapper[4622]: I1126 11:13:30.136017 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6j9ln"
Nov 26 11:13:30 crc kubenswrapper[4622]: I1126 11:13:30.199789 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-d4zmf"
Nov 26 11:13:30 crc kubenswrapper[4622]: I1126 11:13:30.199827 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-d4zmf"
Nov 26 11:13:30 crc kubenswrapper[4622]: I1126 11:13:30.230276 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-d4zmf"
Nov 26 11:13:30 crc kubenswrapper[4622]: I1126 11:13:30.351285 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7zqqd"
Nov 26 11:13:30 crc kubenswrapper[4622]: I1126 11:13:30.351361 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7zqqd"
Nov 26 11:13:30 crc kubenswrapper[4622]: I1126 11:13:30.383810 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7zqqd"
Nov 26 11:13:30 crc kubenswrapper[4622]: I1126 11:13:30.924488 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7zqqd"
Nov 26 11:13:30 crc kubenswrapper[4622]: I1126 11:13:30.926548 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-d4zmf"
Nov 26 11:13:30 crc kubenswrapper[4622]: I1126 11:13:30.929228 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6j9ln"
Nov 26 11:13:31 crc kubenswrapper[4622]: I1126 11:13:31.822469 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:13:31 crc kubenswrapper[4622]: I1126 11:13:31.823188 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:13:31 crc kubenswrapper[4622]: I1126 11:13:31.863991 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:13:31 crc kubenswrapper[4622]: I1126 11:13:31.927053 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:13:32 crc kubenswrapper[4622]: I1126 11:13:32.108118 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xcvpx"
Nov 26 11:13:32 crc kubenswrapper[4622]: I1126 11:13:32.108185 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xcvpx"
Nov 26 11:13:32 crc kubenswrapper[4622]: I1126 11:13:32.140550 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xcvpx"
Nov 26 11:13:32 crc kubenswrapper[4622]: I1126 11:13:32.939150 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xcvpx"
Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.014057 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wnxq7"
Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.014115 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wnxq7"
Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.014819 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6j9ln"]
Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.015016 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6j9ln" podUID="7dc1d52e-ff99-4495-8eef-bc54bf430361" containerName="registry-server" containerID="cri-o://a0d49df7f94f6942cfa4f75c1248b6b360827c345602d4718a065f5345abea11" gracePeriod=2
Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.051288 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wnxq7"
Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.216065 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7zqqd"]
Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.216608 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7zqqd" podUID="af122252-a014-4afb-b751-bd4e53793b53" containerName="registry-server" containerID="cri-o://49dbbbf359e8b872457b21f4457724b97ad01e2ae1a1bba93353d4c450e5919c" gracePeriod=2
Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.342904 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6j9ln"
Need to start a new one" pod="openshift-marketplace/certified-operators-6j9ln" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.381112 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ts6wt" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.381343 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ts6wt" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.412044 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ts6wt" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.420092 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpcqw\" (UniqueName: \"kubernetes.io/projected/7dc1d52e-ff99-4495-8eef-bc54bf430361-kube-api-access-tpcqw\") pod \"7dc1d52e-ff99-4495-8eef-bc54bf430361\" (UID: \"7dc1d52e-ff99-4495-8eef-bc54bf430361\") " Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.420177 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dc1d52e-ff99-4495-8eef-bc54bf430361-utilities\") pod \"7dc1d52e-ff99-4495-8eef-bc54bf430361\" (UID: \"7dc1d52e-ff99-4495-8eef-bc54bf430361\") " Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.420232 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dc1d52e-ff99-4495-8eef-bc54bf430361-catalog-content\") pod \"7dc1d52e-ff99-4495-8eef-bc54bf430361\" (UID: \"7dc1d52e-ff99-4495-8eef-bc54bf430361\") " Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.421125 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dc1d52e-ff99-4495-8eef-bc54bf430361-utilities" (OuterVolumeSpecName: "utilities") pod "7dc1d52e-ff99-4495-8eef-bc54bf430361" (UID: "7dc1d52e-ff99-4495-8eef-bc54bf430361"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.430278 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dc1d52e-ff99-4495-8eef-bc54bf430361-kube-api-access-tpcqw" (OuterVolumeSpecName: "kube-api-access-tpcqw") pod "7dc1d52e-ff99-4495-8eef-bc54bf430361" (UID: "7dc1d52e-ff99-4495-8eef-bc54bf430361"). InnerVolumeSpecName "kube-api-access-tpcqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.512621 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dc1d52e-ff99-4495-8eef-bc54bf430361-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7dc1d52e-ff99-4495-8eef-bc54bf430361" (UID: "7dc1d52e-ff99-4495-8eef-bc54bf430361"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.521521 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpcqw\" (UniqueName: \"kubernetes.io/projected/7dc1d52e-ff99-4495-8eef-bc54bf430361-kube-api-access-tpcqw\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.521558 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dc1d52e-ff99-4495-8eef-bc54bf430361-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.521570 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dc1d52e-ff99-4495-8eef-bc54bf430361-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.527856 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7zqqd" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.622339 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af122252-a014-4afb-b751-bd4e53793b53-catalog-content\") pod \"af122252-a014-4afb-b751-bd4e53793b53\" (UID: \"af122252-a014-4afb-b751-bd4e53793b53\") " Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.622512 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ps4zn\" (UniqueName: \"kubernetes.io/projected/af122252-a014-4afb-b751-bd4e53793b53-kube-api-access-ps4zn\") pod \"af122252-a014-4afb-b751-bd4e53793b53\" (UID: \"af122252-a014-4afb-b751-bd4e53793b53\") " Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.622557 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af122252-a014-4afb-b751-bd4e53793b53-utilities\") pod \"af122252-a014-4afb-b751-bd4e53793b53\" (UID: \"af122252-a014-4afb-b751-bd4e53793b53\") " Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.623310 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af122252-a014-4afb-b751-bd4e53793b53-utilities" (OuterVolumeSpecName: "utilities") pod "af122252-a014-4afb-b751-bd4e53793b53" (UID: "af122252-a014-4afb-b751-bd4e53793b53"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.625017 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af122252-a014-4afb-b751-bd4e53793b53-kube-api-access-ps4zn" (OuterVolumeSpecName: "kube-api-access-ps4zn") pod "af122252-a014-4afb-b751-bd4e53793b53" (UID: "af122252-a014-4afb-b751-bd4e53793b53"). InnerVolumeSpecName "kube-api-access-ps4zn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.660017 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af122252-a014-4afb-b751-bd4e53793b53-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af122252-a014-4afb-b751-bd4e53793b53" (UID: "af122252-a014-4afb-b751-bd4e53793b53"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.723907 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af122252-a014-4afb-b751-bd4e53793b53-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.723949 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ps4zn\" (UniqueName: \"kubernetes.io/projected/af122252-a014-4afb-b751-bd4e53793b53-kube-api-access-ps4zn\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.723965 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af122252-a014-4afb-b751-bd4e53793b53-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.914987 4622 generic.go:334] "Generic (PLEG): container finished" podID="af122252-a014-4afb-b751-bd4e53793b53" containerID="49dbbbf359e8b872457b21f4457724b97ad01e2ae1a1bba93353d4c450e5919c" exitCode=0 Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.915101 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7zqqd" event={"ID":"af122252-a014-4afb-b751-bd4e53793b53","Type":"ContainerDied","Data":"49dbbbf359e8b872457b21f4457724b97ad01e2ae1a1bba93353d4c450e5919c"} Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.915194 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7zqqd" event={"ID":"af122252-a014-4afb-b751-bd4e53793b53","Type":"ContainerDied","Data":"8ccf4938f8a0eec88b77a247d0f1234adf0bf90818f88af9150276532b8d3f30"} Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.915218 4622 scope.go:117] "RemoveContainer" containerID="49dbbbf359e8b872457b21f4457724b97ad01e2ae1a1bba93353d4c450e5919c" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.915560 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7zqqd" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.918486 4622 generic.go:334] "Generic (PLEG): container finished" podID="7dc1d52e-ff99-4495-8eef-bc54bf430361" containerID="a0d49df7f94f6942cfa4f75c1248b6b360827c345602d4718a065f5345abea11" exitCode=0 Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.918547 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6j9ln" event={"ID":"7dc1d52e-ff99-4495-8eef-bc54bf430361","Type":"ContainerDied","Data":"a0d49df7f94f6942cfa4f75c1248b6b360827c345602d4718a065f5345abea11"} Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.918613 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6j9ln" event={"ID":"7dc1d52e-ff99-4495-8eef-bc54bf430361","Type":"ContainerDied","Data":"8dd8ac5dd9db73e2ed5b3a434583d74b5983698f2cbfc3ca579ec4ee0a33c00d"} Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.918787 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6j9ln" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.932774 4622 scope.go:117] "RemoveContainer" containerID="eda7884ed1f4000f610fa581413dcf12fba691db762751315b5b8c3feea424a7" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.956873 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7zqqd"] Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.958417 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ts6wt" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.958475 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wnxq7" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.959284 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7zqqd"] Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.966890 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6j9ln"] Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.967165 4622 scope.go:117] "RemoveContainer" containerID="7c9981dce6ff9dbf5a860be805628bd0e4e8ae874942e2a01b121defd71b63e2" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.968881 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6j9ln"] Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.983942 4622 scope.go:117] "RemoveContainer" containerID="49dbbbf359e8b872457b21f4457724b97ad01e2ae1a1bba93353d4c450e5919c" Nov 26 11:13:33 crc kubenswrapper[4622]: E1126 11:13:33.984328 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49dbbbf359e8b872457b21f4457724b97ad01e2ae1a1bba93353d4c450e5919c\": container with ID starting with 49dbbbf359e8b872457b21f4457724b97ad01e2ae1a1bba93353d4c450e5919c not found: ID does not exist" containerID="49dbbbf359e8b872457b21f4457724b97ad01e2ae1a1bba93353d4c450e5919c" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.984367 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49dbbbf359e8b872457b21f4457724b97ad01e2ae1a1bba93353d4c450e5919c"} err="failed to get container status \"49dbbbf359e8b872457b21f4457724b97ad01e2ae1a1bba93353d4c450e5919c\": rpc error: code = NotFound desc = could not find container \"49dbbbf359e8b872457b21f4457724b97ad01e2ae1a1bba93353d4c450e5919c\": container with ID starting with 49dbbbf359e8b872457b21f4457724b97ad01e2ae1a1bba93353d4c450e5919c not found: ID does not exist" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.984410 4622 scope.go:117] "RemoveContainer" containerID="eda7884ed1f4000f610fa581413dcf12fba691db762751315b5b8c3feea424a7" Nov 26 11:13:33 crc kubenswrapper[4622]: E1126 11:13:33.984714 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eda7884ed1f4000f610fa581413dcf12fba691db762751315b5b8c3feea424a7\": container with ID starting with eda7884ed1f4000f610fa581413dcf12fba691db762751315b5b8c3feea424a7 not found: ID does not exist" containerID="eda7884ed1f4000f610fa581413dcf12fba691db762751315b5b8c3feea424a7" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.984749 4622 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"eda7884ed1f4000f610fa581413dcf12fba691db762751315b5b8c3feea424a7"} err="failed to get container status \"eda7884ed1f4000f610fa581413dcf12fba691db762751315b5b8c3feea424a7\": rpc error: code = NotFound desc = could not find container \"eda7884ed1f4000f610fa581413dcf12fba691db762751315b5b8c3feea424a7\": container with ID starting with eda7884ed1f4000f610fa581413dcf12fba691db762751315b5b8c3feea424a7 not found: ID does not exist" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.984771 4622 scope.go:117] "RemoveContainer" containerID="7c9981dce6ff9dbf5a860be805628bd0e4e8ae874942e2a01b121defd71b63e2" Nov 26 11:13:33 crc kubenswrapper[4622]: E1126 11:13:33.985033 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c9981dce6ff9dbf5a860be805628bd0e4e8ae874942e2a01b121defd71b63e2\": container with ID starting with 7c9981dce6ff9dbf5a860be805628bd0e4e8ae874942e2a01b121defd71b63e2 not found: ID does not exist" containerID="7c9981dce6ff9dbf5a860be805628bd0e4e8ae874942e2a01b121defd71b63e2" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.985058 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c9981dce6ff9dbf5a860be805628bd0e4e8ae874942e2a01b121defd71b63e2"} err="failed to get container status \"7c9981dce6ff9dbf5a860be805628bd0e4e8ae874942e2a01b121defd71b63e2\": rpc error: code = NotFound desc = could not find container \"7c9981dce6ff9dbf5a860be805628bd0e4e8ae874942e2a01b121defd71b63e2\": container with ID starting with 7c9981dce6ff9dbf5a860be805628bd0e4e8ae874942e2a01b121defd71b63e2 not found: ID does not exist" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.985076 4622 scope.go:117] "RemoveContainer" containerID="a0d49df7f94f6942cfa4f75c1248b6b360827c345602d4718a065f5345abea11" Nov 26 11:13:33 crc kubenswrapper[4622]: I1126 11:13:33.999721 4622 scope.go:117] "RemoveContainer" containerID="5ff0014874fa51e47179acd96b71bc8c6aa505d323617d036bb86bc91ce7b3ae" Nov 26 11:13:34 crc kubenswrapper[4622]: I1126 11:13:34.017665 4622 scope.go:117] "RemoveContainer" containerID="de871602004c640ad832e087c366a08e2d7973179e955fd224dd12bfa18647ae" Nov 26 11:13:34 crc kubenswrapper[4622]: I1126 11:13:34.031825 4622 scope.go:117] "RemoveContainer" containerID="a0d49df7f94f6942cfa4f75c1248b6b360827c345602d4718a065f5345abea11" Nov 26 11:13:34 crc kubenswrapper[4622]: E1126 11:13:34.032437 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0d49df7f94f6942cfa4f75c1248b6b360827c345602d4718a065f5345abea11\": container with ID starting with a0d49df7f94f6942cfa4f75c1248b6b360827c345602d4718a065f5345abea11 not found: ID does not exist" containerID="a0d49df7f94f6942cfa4f75c1248b6b360827c345602d4718a065f5345abea11" Nov 26 11:13:34 crc kubenswrapper[4622]: I1126 11:13:34.032470 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0d49df7f94f6942cfa4f75c1248b6b360827c345602d4718a065f5345abea11"} err="failed to get container status \"a0d49df7f94f6942cfa4f75c1248b6b360827c345602d4718a065f5345abea11\": rpc error: code = NotFound desc = could not find container \"a0d49df7f94f6942cfa4f75c1248b6b360827c345602d4718a065f5345abea11\": container with ID starting with a0d49df7f94f6942cfa4f75c1248b6b360827c345602d4718a065f5345abea11 not found: ID does not exist" Nov 26 11:13:34 crc kubenswrapper[4622]: I1126 11:13:34.032491 4622 
scope.go:117] "RemoveContainer" containerID="5ff0014874fa51e47179acd96b71bc8c6aa505d323617d036bb86bc91ce7b3ae" Nov 26 11:13:34 crc kubenswrapper[4622]: E1126 11:13:34.034071 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ff0014874fa51e47179acd96b71bc8c6aa505d323617d036bb86bc91ce7b3ae\": container with ID starting with 5ff0014874fa51e47179acd96b71bc8c6aa505d323617d036bb86bc91ce7b3ae not found: ID does not exist" containerID="5ff0014874fa51e47179acd96b71bc8c6aa505d323617d036bb86bc91ce7b3ae" Nov 26 11:13:34 crc kubenswrapper[4622]: I1126 11:13:34.034111 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ff0014874fa51e47179acd96b71bc8c6aa505d323617d036bb86bc91ce7b3ae"} err="failed to get container status \"5ff0014874fa51e47179acd96b71bc8c6aa505d323617d036bb86bc91ce7b3ae\": rpc error: code = NotFound desc = could not find container \"5ff0014874fa51e47179acd96b71bc8c6aa505d323617d036bb86bc91ce7b3ae\": container with ID starting with 5ff0014874fa51e47179acd96b71bc8c6aa505d323617d036bb86bc91ce7b3ae not found: ID does not exist" Nov 26 11:13:34 crc kubenswrapper[4622]: I1126 11:13:34.034141 4622 scope.go:117] "RemoveContainer" containerID="de871602004c640ad832e087c366a08e2d7973179e955fd224dd12bfa18647ae" Nov 26 11:13:34 crc kubenswrapper[4622]: E1126 11:13:34.034386 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de871602004c640ad832e087c366a08e2d7973179e955fd224dd12bfa18647ae\": container with ID starting with de871602004c640ad832e087c366a08e2d7973179e955fd224dd12bfa18647ae not found: ID does not exist" containerID="de871602004c640ad832e087c366a08e2d7973179e955fd224dd12bfa18647ae" Nov 26 11:13:34 crc kubenswrapper[4622]: I1126 11:13:34.034410 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de871602004c640ad832e087c366a08e2d7973179e955fd224dd12bfa18647ae"} err="failed to get container status \"de871602004c640ad832e087c366a08e2d7973179e955fd224dd12bfa18647ae\": rpc error: code = NotFound desc = could not find container \"de871602004c640ad832e087c366a08e2d7973179e955fd224dd12bfa18647ae\": container with ID starting with de871602004c640ad832e087c366a08e2d7973179e955fd224dd12bfa18647ae not found: ID does not exist" Nov 26 11:13:34 crc kubenswrapper[4622]: I1126 11:13:34.716999 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dc1d52e-ff99-4495-8eef-bc54bf430361" path="/var/lib/kubelet/pods/7dc1d52e-ff99-4495-8eef-bc54bf430361/volumes" Nov 26 11:13:34 crc kubenswrapper[4622]: I1126 11:13:34.717717 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af122252-a014-4afb-b751-bd4e53793b53" path="/var/lib/kubelet/pods/af122252-a014-4afb-b751-bd4e53793b53/volumes" Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.417602 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcvpx"] Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.417850 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xcvpx" podUID="9b268297-0c05-4b22-acfd-a2dc214d44d3" containerName="registry-server" containerID="cri-o://1439e4b25a0c96c55ca913397ed6f986d2e7464f005998abcdc240b0cc352046" gracePeriod=2 Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.617443 4622 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/redhat-operators-ts6wt"] Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.779696 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xcvpx" Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.851860 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hf8d\" (UniqueName: \"kubernetes.io/projected/9b268297-0c05-4b22-acfd-a2dc214d44d3-kube-api-access-5hf8d\") pod \"9b268297-0c05-4b22-acfd-a2dc214d44d3\" (UID: \"9b268297-0c05-4b22-acfd-a2dc214d44d3\") " Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.851949 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b268297-0c05-4b22-acfd-a2dc214d44d3-utilities\") pod \"9b268297-0c05-4b22-acfd-a2dc214d44d3\" (UID: \"9b268297-0c05-4b22-acfd-a2dc214d44d3\") " Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.852162 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b268297-0c05-4b22-acfd-a2dc214d44d3-catalog-content\") pod \"9b268297-0c05-4b22-acfd-a2dc214d44d3\" (UID: \"9b268297-0c05-4b22-acfd-a2dc214d44d3\") " Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.852908 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b268297-0c05-4b22-acfd-a2dc214d44d3-utilities" (OuterVolumeSpecName: "utilities") pod "9b268297-0c05-4b22-acfd-a2dc214d44d3" (UID: "9b268297-0c05-4b22-acfd-a2dc214d44d3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.856963 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b268297-0c05-4b22-acfd-a2dc214d44d3-kube-api-access-5hf8d" (OuterVolumeSpecName: "kube-api-access-5hf8d") pod "9b268297-0c05-4b22-acfd-a2dc214d44d3" (UID: "9b268297-0c05-4b22-acfd-a2dc214d44d3"). InnerVolumeSpecName "kube-api-access-5hf8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.867303 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b268297-0c05-4b22-acfd-a2dc214d44d3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9b268297-0c05-4b22-acfd-a2dc214d44d3" (UID: "9b268297-0c05-4b22-acfd-a2dc214d44d3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.940402 4622 generic.go:334] "Generic (PLEG): container finished" podID="9b268297-0c05-4b22-acfd-a2dc214d44d3" containerID="1439e4b25a0c96c55ca913397ed6f986d2e7464f005998abcdc240b0cc352046" exitCode=0 Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.940480 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xcvpx" Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.940494 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcvpx" event={"ID":"9b268297-0c05-4b22-acfd-a2dc214d44d3","Type":"ContainerDied","Data":"1439e4b25a0c96c55ca913397ed6f986d2e7464f005998abcdc240b0cc352046"} Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.940575 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcvpx" event={"ID":"9b268297-0c05-4b22-acfd-a2dc214d44d3","Type":"ContainerDied","Data":"61ca90ac0079ea6875e01d5e9dae52a2f1bdd3660c2baca11ea1a752bd47c757"} Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.940598 4622 scope.go:117] "RemoveContainer" containerID="1439e4b25a0c96c55ca913397ed6f986d2e7464f005998abcdc240b0cc352046" Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.954136 4622 scope.go:117] "RemoveContainer" containerID="1d98bb7ba682ec019bdc55764970f2f0ea6636bbf6f8e4df1500e8a03dfdbf4b" Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.954970 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b268297-0c05-4b22-acfd-a2dc214d44d3-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.955010 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hf8d\" (UniqueName: \"kubernetes.io/projected/9b268297-0c05-4b22-acfd-a2dc214d44d3-kube-api-access-5hf8d\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.955032 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b268297-0c05-4b22-acfd-a2dc214d44d3-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.966923 4622 scope.go:117] "RemoveContainer" containerID="1ee9d59b4db930a2b860f8b26e02d3ecdb61eb527d148985215e9b26d3c099f2" Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.974085 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcvpx"] Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.977420 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcvpx"] Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.996729 4622 scope.go:117] "RemoveContainer" containerID="1439e4b25a0c96c55ca913397ed6f986d2e7464f005998abcdc240b0cc352046" Nov 26 11:13:35 crc kubenswrapper[4622]: E1126 11:13:35.997545 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1439e4b25a0c96c55ca913397ed6f986d2e7464f005998abcdc240b0cc352046\": container with ID starting with 1439e4b25a0c96c55ca913397ed6f986d2e7464f005998abcdc240b0cc352046 not found: ID does not exist" containerID="1439e4b25a0c96c55ca913397ed6f986d2e7464f005998abcdc240b0cc352046" Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.997602 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1439e4b25a0c96c55ca913397ed6f986d2e7464f005998abcdc240b0cc352046"} err="failed to get container status \"1439e4b25a0c96c55ca913397ed6f986d2e7464f005998abcdc240b0cc352046\": rpc error: code = NotFound desc = could not find container \"1439e4b25a0c96c55ca913397ed6f986d2e7464f005998abcdc240b0cc352046\": container with ID starting with 
Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.997633 4622 scope.go:117] "RemoveContainer" containerID="1d98bb7ba682ec019bdc55764970f2f0ea6636bbf6f8e4df1500e8a03dfdbf4b"
Nov 26 11:13:35 crc kubenswrapper[4622]: E1126 11:13:35.998001 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d98bb7ba682ec019bdc55764970f2f0ea6636bbf6f8e4df1500e8a03dfdbf4b\": container with ID starting with 1d98bb7ba682ec019bdc55764970f2f0ea6636bbf6f8e4df1500e8a03dfdbf4b not found: ID does not exist" containerID="1d98bb7ba682ec019bdc55764970f2f0ea6636bbf6f8e4df1500e8a03dfdbf4b"
Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.998033 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d98bb7ba682ec019bdc55764970f2f0ea6636bbf6f8e4df1500e8a03dfdbf4b"} err="failed to get container status \"1d98bb7ba682ec019bdc55764970f2f0ea6636bbf6f8e4df1500e8a03dfdbf4b\": rpc error: code = NotFound desc = could not find container \"1d98bb7ba682ec019bdc55764970f2f0ea6636bbf6f8e4df1500e8a03dfdbf4b\": container with ID starting with 1d98bb7ba682ec019bdc55764970f2f0ea6636bbf6f8e4df1500e8a03dfdbf4b not found: ID does not exist"
Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.998054 4622 scope.go:117] "RemoveContainer" containerID="1ee9d59b4db930a2b860f8b26e02d3ecdb61eb527d148985215e9b26d3c099f2"
Nov 26 11:13:35 crc kubenswrapper[4622]: E1126 11:13:35.998314 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ee9d59b4db930a2b860f8b26e02d3ecdb61eb527d148985215e9b26d3c099f2\": container with ID starting with 1ee9d59b4db930a2b860f8b26e02d3ecdb61eb527d148985215e9b26d3c099f2 not found: ID does not exist" containerID="1ee9d59b4db930a2b860f8b26e02d3ecdb61eb527d148985215e9b26d3c099f2"
Nov 26 11:13:35 crc kubenswrapper[4622]: I1126 11:13:35.998349 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ee9d59b4db930a2b860f8b26e02d3ecdb61eb527d148985215e9b26d3c099f2"} err="failed to get container status \"1ee9d59b4db930a2b860f8b26e02d3ecdb61eb527d148985215e9b26d3c099f2\": rpc error: code = NotFound desc = could not find container \"1ee9d59b4db930a2b860f8b26e02d3ecdb61eb527d148985215e9b26d3c099f2\": container with ID starting with 1ee9d59b4db930a2b860f8b26e02d3ecdb61eb527d148985215e9b26d3c099f2 not found: ID does not exist"
Nov 26 11:13:36 crc kubenswrapper[4622]: I1126 11:13:36.713565 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b268297-0c05-4b22-acfd-a2dc214d44d3" path="/var/lib/kubelet/pods/9b268297-0c05-4b22-acfd-a2dc214d44d3/volumes"
Nov 26 11:13:36 crc kubenswrapper[4622]: I1126 11:13:36.947941 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ts6wt" podUID="0edf3634-60b1-4d74-a80b-9e49fb4f5e1f" containerName="registry-server" containerID="cri-o://5cfc96e5ebf7c2e22343641870a896a80e7aed3652f2318e35ed6f02ae8a273a" gracePeriod=2
Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.271484 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ts6wt"
Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.372677 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-utilities\") pod \"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f\" (UID: \"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f\") "
Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.372805 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-catalog-content\") pod \"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f\" (UID: \"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f\") "
Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.372859 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqcns\" (UniqueName: \"kubernetes.io/projected/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-kube-api-access-rqcns\") pod \"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f\" (UID: \"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f\") "
Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.373658 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-utilities" (OuterVolumeSpecName: "utilities") pod "0edf3634-60b1-4d74-a80b-9e49fb4f5e1f" (UID: "0edf3634-60b1-4d74-a80b-9e49fb4f5e1f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.376589 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-kube-api-access-rqcns" (OuterVolumeSpecName: "kube-api-access-rqcns") pod "0edf3634-60b1-4d74-a80b-9e49fb4f5e1f" (UID: "0edf3634-60b1-4d74-a80b-9e49fb4f5e1f"). InnerVolumeSpecName "kube-api-access-rqcns". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.444534 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0edf3634-60b1-4d74-a80b-9e49fb4f5e1f" (UID: "0edf3634-60b1-4d74-a80b-9e49fb4f5e1f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.474160 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.474192 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqcns\" (UniqueName: \"kubernetes.io/projected/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-kube-api-access-rqcns\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.474207 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.657661 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zf22z" Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.957543 4622 generic.go:334] "Generic (PLEG): container finished" podID="0edf3634-60b1-4d74-a80b-9e49fb4f5e1f" containerID="5cfc96e5ebf7c2e22343641870a896a80e7aed3652f2318e35ed6f02ae8a273a" exitCode=0 Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.957597 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ts6wt" Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.957621 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ts6wt" event={"ID":"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f","Type":"ContainerDied","Data":"5cfc96e5ebf7c2e22343641870a896a80e7aed3652f2318e35ed6f02ae8a273a"} Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.957870 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ts6wt" event={"ID":"0edf3634-60b1-4d74-a80b-9e49fb4f5e1f","Type":"ContainerDied","Data":"88ed69733157e22ac77d885af0f383c140894c503203656fd3770cf8c833b7ab"} Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.957892 4622 scope.go:117] "RemoveContainer" containerID="5cfc96e5ebf7c2e22343641870a896a80e7aed3652f2318e35ed6f02ae8a273a" Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.971559 4622 scope.go:117] "RemoveContainer" containerID="65b2bdbb77fc22041cef14d0e5008166f85fbb9520a0cf4bb840404ba0e65f39" Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.980053 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ts6wt"] Nov 26 11:13:37 crc kubenswrapper[4622]: I1126 11:13:37.983785 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ts6wt"] Nov 26 11:13:38 crc kubenswrapper[4622]: I1126 11:13:38.010071 4622 scope.go:117] "RemoveContainer" containerID="b98ed0200792d9cd962686139f86ab54e5419e44a77487b675aac80a4b05760b" Nov 26 11:13:38 crc kubenswrapper[4622]: I1126 11:13:38.021471 4622 scope.go:117] "RemoveContainer" containerID="5cfc96e5ebf7c2e22343641870a896a80e7aed3652f2318e35ed6f02ae8a273a" Nov 26 11:13:38 crc kubenswrapper[4622]: E1126 11:13:38.021927 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cfc96e5ebf7c2e22343641870a896a80e7aed3652f2318e35ed6f02ae8a273a\": container with ID starting with 
5cfc96e5ebf7c2e22343641870a896a80e7aed3652f2318e35ed6f02ae8a273a not found: ID does not exist" containerID="5cfc96e5ebf7c2e22343641870a896a80e7aed3652f2318e35ed6f02ae8a273a" Nov 26 11:13:38 crc kubenswrapper[4622]: I1126 11:13:38.021969 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cfc96e5ebf7c2e22343641870a896a80e7aed3652f2318e35ed6f02ae8a273a"} err="failed to get container status \"5cfc96e5ebf7c2e22343641870a896a80e7aed3652f2318e35ed6f02ae8a273a\": rpc error: code = NotFound desc = could not find container \"5cfc96e5ebf7c2e22343641870a896a80e7aed3652f2318e35ed6f02ae8a273a\": container with ID starting with 5cfc96e5ebf7c2e22343641870a896a80e7aed3652f2318e35ed6f02ae8a273a not found: ID does not exist" Nov 26 11:13:38 crc kubenswrapper[4622]: I1126 11:13:38.021996 4622 scope.go:117] "RemoveContainer" containerID="65b2bdbb77fc22041cef14d0e5008166f85fbb9520a0cf4bb840404ba0e65f39" Nov 26 11:13:38 crc kubenswrapper[4622]: E1126 11:13:38.022366 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65b2bdbb77fc22041cef14d0e5008166f85fbb9520a0cf4bb840404ba0e65f39\": container with ID starting with 65b2bdbb77fc22041cef14d0e5008166f85fbb9520a0cf4bb840404ba0e65f39 not found: ID does not exist" containerID="65b2bdbb77fc22041cef14d0e5008166f85fbb9520a0cf4bb840404ba0e65f39" Nov 26 11:13:38 crc kubenswrapper[4622]: I1126 11:13:38.022403 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65b2bdbb77fc22041cef14d0e5008166f85fbb9520a0cf4bb840404ba0e65f39"} err="failed to get container status \"65b2bdbb77fc22041cef14d0e5008166f85fbb9520a0cf4bb840404ba0e65f39\": rpc error: code = NotFound desc = could not find container \"65b2bdbb77fc22041cef14d0e5008166f85fbb9520a0cf4bb840404ba0e65f39\": container with ID starting with 65b2bdbb77fc22041cef14d0e5008166f85fbb9520a0cf4bb840404ba0e65f39 not found: ID does not exist" Nov 26 11:13:38 crc kubenswrapper[4622]: I1126 11:13:38.022425 4622 scope.go:117] "RemoveContainer" containerID="b98ed0200792d9cd962686139f86ab54e5419e44a77487b675aac80a4b05760b" Nov 26 11:13:38 crc kubenswrapper[4622]: E1126 11:13:38.022683 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b98ed0200792d9cd962686139f86ab54e5419e44a77487b675aac80a4b05760b\": container with ID starting with b98ed0200792d9cd962686139f86ab54e5419e44a77487b675aac80a4b05760b not found: ID does not exist" containerID="b98ed0200792d9cd962686139f86ab54e5419e44a77487b675aac80a4b05760b" Nov 26 11:13:38 crc kubenswrapper[4622]: I1126 11:13:38.022707 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b98ed0200792d9cd962686139f86ab54e5419e44a77487b675aac80a4b05760b"} err="failed to get container status \"b98ed0200792d9cd962686139f86ab54e5419e44a77487b675aac80a4b05760b\": rpc error: code = NotFound desc = could not find container \"b98ed0200792d9cd962686139f86ab54e5419e44a77487b675aac80a4b05760b\": container with ID starting with b98ed0200792d9cd962686139f86ab54e5419e44a77487b675aac80a4b05760b not found: ID does not exist" Nov 26 11:13:38 crc kubenswrapper[4622]: I1126 11:13:38.718774 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0edf3634-60b1-4d74-a80b-9e49fb4f5e1f" path="/var/lib/kubelet/pods/0edf3634-60b1-4d74-a80b-9e49fb4f5e1f/volumes" Nov 26 11:13:45 crc kubenswrapper[4622]: I1126 11:13:45.199364 
4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:13:45 crc kubenswrapper[4622]: I1126 11:13:45.200098 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:13:45 crc kubenswrapper[4622]: I1126 11:13:45.784210 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-b44gm"] Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.502605 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Nov 26 11:13:46 crc kubenswrapper[4622]: E1126 11:13:46.502836 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0edf3634-60b1-4d74-a80b-9e49fb4f5e1f" containerName="extract-utilities" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.502850 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="0edf3634-60b1-4d74-a80b-9e49fb4f5e1f" containerName="extract-utilities" Nov 26 11:13:46 crc kubenswrapper[4622]: E1126 11:13:46.502861 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0edf3634-60b1-4d74-a80b-9e49fb4f5e1f" containerName="extract-content" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.502866 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="0edf3634-60b1-4d74-a80b-9e49fb4f5e1f" containerName="extract-content" Nov 26 11:13:46 crc kubenswrapper[4622]: E1126 11:13:46.502874 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af122252-a014-4afb-b751-bd4e53793b53" containerName="extract-utilities" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.502879 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="af122252-a014-4afb-b751-bd4e53793b53" containerName="extract-utilities" Nov 26 11:13:46 crc kubenswrapper[4622]: E1126 11:13:46.502893 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b268297-0c05-4b22-acfd-a2dc214d44d3" containerName="registry-server" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.502898 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b268297-0c05-4b22-acfd-a2dc214d44d3" containerName="registry-server" Nov 26 11:13:46 crc kubenswrapper[4622]: E1126 11:13:46.502904 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dc1d52e-ff99-4495-8eef-bc54bf430361" containerName="registry-server" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.502910 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dc1d52e-ff99-4495-8eef-bc54bf430361" containerName="registry-server" Nov 26 11:13:46 crc kubenswrapper[4622]: E1126 11:13:46.502918 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af122252-a014-4afb-b751-bd4e53793b53" containerName="extract-content" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.502924 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="af122252-a014-4afb-b751-bd4e53793b53" containerName="extract-content" Nov 26 11:13:46 crc kubenswrapper[4622]: E1126 11:13:46.502931 4622 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="0edf3634-60b1-4d74-a80b-9e49fb4f5e1f" containerName="registry-server" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.502936 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="0edf3634-60b1-4d74-a80b-9e49fb4f5e1f" containerName="registry-server" Nov 26 11:13:46 crc kubenswrapper[4622]: E1126 11:13:46.502945 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b268297-0c05-4b22-acfd-a2dc214d44d3" containerName="extract-utilities" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.502950 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b268297-0c05-4b22-acfd-a2dc214d44d3" containerName="extract-utilities" Nov 26 11:13:46 crc kubenswrapper[4622]: E1126 11:13:46.502958 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15223db2-233f-4352-849e-783794b765a0" containerName="pruner" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.502963 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="15223db2-233f-4352-849e-783794b765a0" containerName="pruner" Nov 26 11:13:46 crc kubenswrapper[4622]: E1126 11:13:46.502970 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b268297-0c05-4b22-acfd-a2dc214d44d3" containerName="extract-content" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.502975 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b268297-0c05-4b22-acfd-a2dc214d44d3" containerName="extract-content" Nov 26 11:13:46 crc kubenswrapper[4622]: E1126 11:13:46.502982 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af122252-a014-4afb-b751-bd4e53793b53" containerName="registry-server" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.502987 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="af122252-a014-4afb-b751-bd4e53793b53" containerName="registry-server" Nov 26 11:13:46 crc kubenswrapper[4622]: E1126 11:13:46.502996 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dc1d52e-ff99-4495-8eef-bc54bf430361" containerName="extract-content" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.503006 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dc1d52e-ff99-4495-8eef-bc54bf430361" containerName="extract-content" Nov 26 11:13:46 crc kubenswrapper[4622]: E1126 11:13:46.503012 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dc1d52e-ff99-4495-8eef-bc54bf430361" containerName="extract-utilities" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.503017 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dc1d52e-ff99-4495-8eef-bc54bf430361" containerName="extract-utilities" Nov 26 11:13:46 crc kubenswrapper[4622]: E1126 11:13:46.503023 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b93c2d6c-1d6a-4784-84c4-0255edb84061" containerName="pruner" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.503028 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="b93c2d6c-1d6a-4784-84c4-0255edb84061" containerName="pruner" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.503117 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="b93c2d6c-1d6a-4784-84c4-0255edb84061" containerName="pruner" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.503126 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b268297-0c05-4b22-acfd-a2dc214d44d3" containerName="registry-server" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.503139 4622 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="7dc1d52e-ff99-4495-8eef-bc54bf430361" containerName="registry-server" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.503147 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="15223db2-233f-4352-849e-783794b765a0" containerName="pruner" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.503153 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="af122252-a014-4afb-b751-bd4e53793b53" containerName="registry-server" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.503160 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="0edf3634-60b1-4d74-a80b-9e49fb4f5e1f" containerName="registry-server" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.503567 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.506180 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.506726 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.517155 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.579825 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a1658221-c2ac-40ee-8595-f3ae684888fb-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a1658221-c2ac-40ee-8595-f3ae684888fb\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.579937 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a1658221-c2ac-40ee-8595-f3ae684888fb-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a1658221-c2ac-40ee-8595-f3ae684888fb\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.681466 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a1658221-c2ac-40ee-8595-f3ae684888fb-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a1658221-c2ac-40ee-8595-f3ae684888fb\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.681547 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a1658221-c2ac-40ee-8595-f3ae684888fb-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a1658221-c2ac-40ee-8595-f3ae684888fb\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.681858 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a1658221-c2ac-40ee-8595-f3ae684888fb-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a1658221-c2ac-40ee-8595-f3ae684888fb\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.699208 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/a1658221-c2ac-40ee-8595-f3ae684888fb-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a1658221-c2ac-40ee-8595-f3ae684888fb\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.816320 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 26 11:13:46 crc kubenswrapper[4622]: I1126 11:13:46.822782 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 26 11:13:47 crc kubenswrapper[4622]: I1126 11:13:47.195455 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Nov 26 11:13:47 crc kubenswrapper[4622]: W1126 11:13:47.204593 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-poda1658221_c2ac_40ee_8595_f3ae684888fb.slice/crio-dd284db39ad196d3c7b6a09de4fbea500447acf508619892bd76567bc5dadeca WatchSource:0}: Error finding container dd284db39ad196d3c7b6a09de4fbea500447acf508619892bd76567bc5dadeca: Status 404 returned error can't find the container with id dd284db39ad196d3c7b6a09de4fbea500447acf508619892bd76567bc5dadeca Nov 26 11:13:48 crc kubenswrapper[4622]: I1126 11:13:48.009940 4622 generic.go:334] "Generic (PLEG): container finished" podID="a1658221-c2ac-40ee-8595-f3ae684888fb" containerID="d7918bb7c6192b30a927cb5bf331405e6e6d162fb9de81535f688c4897c037db" exitCode=0 Nov 26 11:13:48 crc kubenswrapper[4622]: I1126 11:13:48.010164 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"a1658221-c2ac-40ee-8595-f3ae684888fb","Type":"ContainerDied","Data":"d7918bb7c6192b30a927cb5bf331405e6e6d162fb9de81535f688c4897c037db"} Nov 26 11:13:48 crc kubenswrapper[4622]: I1126 11:13:48.010255 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"a1658221-c2ac-40ee-8595-f3ae684888fb","Type":"ContainerStarted","Data":"dd284db39ad196d3c7b6a09de4fbea500447acf508619892bd76567bc5dadeca"} Nov 26 11:13:49 crc kubenswrapper[4622]: I1126 11:13:49.181904 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 26 11:13:49 crc kubenswrapper[4622]: I1126 11:13:49.211853 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a1658221-c2ac-40ee-8595-f3ae684888fb-kubelet-dir\") pod \"a1658221-c2ac-40ee-8595-f3ae684888fb\" (UID: \"a1658221-c2ac-40ee-8595-f3ae684888fb\") " Nov 26 11:13:49 crc kubenswrapper[4622]: I1126 11:13:49.211999 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a1658221-c2ac-40ee-8595-f3ae684888fb-kube-api-access\") pod \"a1658221-c2ac-40ee-8595-f3ae684888fb\" (UID: \"a1658221-c2ac-40ee-8595-f3ae684888fb\") " Nov 26 11:13:49 crc kubenswrapper[4622]: I1126 11:13:49.212135 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a1658221-c2ac-40ee-8595-f3ae684888fb-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a1658221-c2ac-40ee-8595-f3ae684888fb" (UID: "a1658221-c2ac-40ee-8595-f3ae684888fb"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:13:49 crc kubenswrapper[4622]: I1126 11:13:49.212288 4622 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a1658221-c2ac-40ee-8595-f3ae684888fb-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:49 crc kubenswrapper[4622]: I1126 11:13:49.216718 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1658221-c2ac-40ee-8595-f3ae684888fb-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a1658221-c2ac-40ee-8595-f3ae684888fb" (UID: "a1658221-c2ac-40ee-8595-f3ae684888fb"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:13:49 crc kubenswrapper[4622]: I1126 11:13:49.313712 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a1658221-c2ac-40ee-8595-f3ae684888fb-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 26 11:13:50 crc kubenswrapper[4622]: I1126 11:13:50.020452 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"a1658221-c2ac-40ee-8595-f3ae684888fb","Type":"ContainerDied","Data":"dd284db39ad196d3c7b6a09de4fbea500447acf508619892bd76567bc5dadeca"} Nov 26 11:13:50 crc kubenswrapper[4622]: I1126 11:13:50.020695 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd284db39ad196d3c7b6a09de4fbea500447acf508619892bd76567bc5dadeca" Nov 26 11:13:50 crc kubenswrapper[4622]: I1126 11:13:50.020516 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.097091 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Nov 26 11:13:51 crc kubenswrapper[4622]: E1126 11:13:51.097340 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1658221-c2ac-40ee-8595-f3ae684888fb" containerName="pruner" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.097355 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1658221-c2ac-40ee-8595-f3ae684888fb" containerName="pruner" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.097452 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1658221-c2ac-40ee-8595-f3ae684888fb" containerName="pruner" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.097839 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.099872 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.100864 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.109726 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.135579 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/97944a6f-d7bf-4488-a74b-d70e36746f7f-var-lock\") pod \"installer-9-crc\" (UID: \"97944a6f-d7bf-4488-a74b-d70e36746f7f\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.135644 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/97944a6f-d7bf-4488-a74b-d70e36746f7f-kubelet-dir\") pod \"installer-9-crc\" (UID: \"97944a6f-d7bf-4488-a74b-d70e36746f7f\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.135769 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/97944a6f-d7bf-4488-a74b-d70e36746f7f-kube-api-access\") pod \"installer-9-crc\" (UID: \"97944a6f-d7bf-4488-a74b-d70e36746f7f\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.237520 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/97944a6f-d7bf-4488-a74b-d70e36746f7f-var-lock\") pod \"installer-9-crc\" (UID: \"97944a6f-d7bf-4488-a74b-d70e36746f7f\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.237595 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/97944a6f-d7bf-4488-a74b-d70e36746f7f-kubelet-dir\") pod \"installer-9-crc\" (UID: \"97944a6f-d7bf-4488-a74b-d70e36746f7f\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.237660 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/97944a6f-d7bf-4488-a74b-d70e36746f7f-var-lock\") pod \"installer-9-crc\" (UID: \"97944a6f-d7bf-4488-a74b-d70e36746f7f\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.237694 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/97944a6f-d7bf-4488-a74b-d70e36746f7f-kube-api-access\") pod \"installer-9-crc\" (UID: \"97944a6f-d7bf-4488-a74b-d70e36746f7f\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.237778 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/97944a6f-d7bf-4488-a74b-d70e36746f7f-kubelet-dir\") pod \"installer-9-crc\" (UID: 
\"97944a6f-d7bf-4488-a74b-d70e36746f7f\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.253339 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/97944a6f-d7bf-4488-a74b-d70e36746f7f-kube-api-access\") pod \"installer-9-crc\" (UID: \"97944a6f-d7bf-4488-a74b-d70e36746f7f\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.410427 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 26 11:13:51 crc kubenswrapper[4622]: I1126 11:13:51.751968 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Nov 26 11:13:52 crc kubenswrapper[4622]: I1126 11:13:52.041200 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"97944a6f-d7bf-4488-a74b-d70e36746f7f","Type":"ContainerStarted","Data":"5d67794c7a59e8ee08de61cedf907d6dcf7b652ad323f19edfb88cebe365c232"} Nov 26 11:13:52 crc kubenswrapper[4622]: I1126 11:13:52.041547 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"97944a6f-d7bf-4488-a74b-d70e36746f7f","Type":"ContainerStarted","Data":"15269ab327a266b852b7d2e8bb123af0f99f86422624d8bdd4ca4c93c71bed65"} Nov 26 11:13:52 crc kubenswrapper[4622]: I1126 11:13:52.054086 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=1.054071194 podStartE2EDuration="1.054071194s" podCreationTimestamp="2025-11-26 11:13:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:13:52.052024577 +0000 UTC m=+191.643236119" watchObservedRunningTime="2025-11-26 11:13:52.054071194 +0000 UTC m=+191.645282715" Nov 26 11:14:10 crc kubenswrapper[4622]: I1126 11:14:10.805203 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" podUID="af17650f-233a-479a-9152-082ca02f5cbb" containerName="oauth-openshift" containerID="cri-o://67fd57112ca3793f4454906125f13f2a47dae77a9b6c003adc4191a68dd17e6c" gracePeriod=15 Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.105058 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.131140 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-d6cd9cb6-md8mn"] Nov 26 11:14:11 crc kubenswrapper[4622]: E1126 11:14:11.131397 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af17650f-233a-479a-9152-082ca02f5cbb" containerName="oauth-openshift" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.131417 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="af17650f-233a-479a-9152-082ca02f5cbb" containerName="oauth-openshift" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.131550 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="af17650f-233a-479a-9152-082ca02f5cbb" containerName="oauth-openshift" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.131948 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.144208 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-d6cd9cb6-md8mn"] Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.157593 4622 generic.go:334] "Generic (PLEG): container finished" podID="af17650f-233a-479a-9152-082ca02f5cbb" containerID="67fd57112ca3793f4454906125f13f2a47dae77a9b6c003adc4191a68dd17e6c" exitCode=0 Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.157634 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" event={"ID":"af17650f-233a-479a-9152-082ca02f5cbb","Type":"ContainerDied","Data":"67fd57112ca3793f4454906125f13f2a47dae77a9b6c003adc4191a68dd17e6c"} Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.157648 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.157662 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-b44gm" event={"ID":"af17650f-233a-479a-9152-082ca02f5cbb","Type":"ContainerDied","Data":"61e2b810d1d63d34c0274ee25f0b42751fd90d29e0e08f62993dfe7caea67d4c"} Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.157700 4622 scope.go:117] "RemoveContainer" containerID="67fd57112ca3793f4454906125f13f2a47dae77a9b6c003adc4191a68dd17e6c" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.176529 4622 scope.go:117] "RemoveContainer" containerID="67fd57112ca3793f4454906125f13f2a47dae77a9b6c003adc4191a68dd17e6c" Nov 26 11:14:11 crc kubenswrapper[4622]: E1126 11:14:11.177419 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67fd57112ca3793f4454906125f13f2a47dae77a9b6c003adc4191a68dd17e6c\": container with ID starting with 67fd57112ca3793f4454906125f13f2a47dae77a9b6c003adc4191a68dd17e6c not found: ID does not exist" containerID="67fd57112ca3793f4454906125f13f2a47dae77a9b6c003adc4191a68dd17e6c" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.177494 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67fd57112ca3793f4454906125f13f2a47dae77a9b6c003adc4191a68dd17e6c"} err="failed to get container status \"67fd57112ca3793f4454906125f13f2a47dae77a9b6c003adc4191a68dd17e6c\": rpc error: code = NotFound desc = could not find container \"67fd57112ca3793f4454906125f13f2a47dae77a9b6c003adc4191a68dd17e6c\": container with ID starting with 67fd57112ca3793f4454906125f13f2a47dae77a9b6c003adc4191a68dd17e6c not found: ID does not exist" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203303 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-login\") pod \"af17650f-233a-479a-9152-082ca02f5cbb\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203342 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-provider-selection\") pod \"af17650f-233a-479a-9152-082ca02f5cbb\" 
(UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203378 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-cliconfig\") pod \"af17650f-233a-479a-9152-082ca02f5cbb\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203400 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-service-ca\") pod \"af17650f-233a-479a-9152-082ca02f5cbb\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203417 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-audit-policies\") pod \"af17650f-233a-479a-9152-082ca02f5cbb\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203534 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-router-certs\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203563 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203583 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203600 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203629 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kwlm\" (UniqueName: \"kubernetes.io/projected/1846120d-0192-4626-8b0d-d9653339013e-kube-api-access-5kwlm\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203647 4622 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-session\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203669 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203690 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203750 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1846120d-0192-4626-8b0d-d9653339013e-audit-dir\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203775 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-service-ca\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203793 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203815 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-user-template-login\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203835 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1846120d-0192-4626-8b0d-d9653339013e-audit-policies\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 
11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.203856 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-user-template-error\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.204349 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "af17650f-233a-479a-9152-082ca02f5cbb" (UID: "af17650f-233a-479a-9152-082ca02f5cbb"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.204497 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "af17650f-233a-479a-9152-082ca02f5cbb" (UID: "af17650f-233a-479a-9152-082ca02f5cbb"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.204588 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "af17650f-233a-479a-9152-082ca02f5cbb" (UID: "af17650f-233a-479a-9152-082ca02f5cbb"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.209009 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "af17650f-233a-479a-9152-082ca02f5cbb" (UID: "af17650f-233a-479a-9152-082ca02f5cbb"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.209693 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "af17650f-233a-479a-9152-082ca02f5cbb" (UID: "af17650f-233a-479a-9152-082ca02f5cbb"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.304719 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/af17650f-233a-479a-9152-082ca02f5cbb-audit-dir\") pod \"af17650f-233a-479a-9152-082ca02f5cbb\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.304788 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-idp-0-file-data\") pod \"af17650f-233a-479a-9152-082ca02f5cbb\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.304822 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-error\") pod \"af17650f-233a-479a-9152-082ca02f5cbb\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.304841 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-session\") pod \"af17650f-233a-479a-9152-082ca02f5cbb\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.304865 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-trusted-ca-bundle\") pod \"af17650f-233a-479a-9152-082ca02f5cbb\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.304872 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af17650f-233a-479a-9152-082ca02f5cbb-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "af17650f-233a-479a-9152-082ca02f5cbb" (UID: "af17650f-233a-479a-9152-082ca02f5cbb"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.304960 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjtl7\" (UniqueName: \"kubernetes.io/projected/af17650f-233a-479a-9152-082ca02f5cbb-kube-api-access-fjtl7\") pod \"af17650f-233a-479a-9152-082ca02f5cbb\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305031 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-serving-cert\") pod \"af17650f-233a-479a-9152-082ca02f5cbb\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305052 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-router-certs\") pod \"af17650f-233a-479a-9152-082ca02f5cbb\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305076 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-ocp-branding-template\") pod \"af17650f-233a-479a-9152-082ca02f5cbb\" (UID: \"af17650f-233a-479a-9152-082ca02f5cbb\") " Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305246 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305290 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305333 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kwlm\" (UniqueName: \"kubernetes.io/projected/1846120d-0192-4626-8b0d-d9653339013e-kube-api-access-5kwlm\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305359 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-session\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305383 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305410 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305488 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1846120d-0192-4626-8b0d-d9653339013e-audit-dir\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305539 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-service-ca\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305562 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305584 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-user-template-login\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305609 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1846120d-0192-4626-8b0d-d9653339013e-audit-policies\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305628 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-user-template-error\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305659 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-router-certs\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305678 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305727 4622 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/af17650f-233a-479a-9152-082ca02f5cbb-audit-dir\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305739 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305750 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305759 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305768 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.305785 4622 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.306582 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "af17650f-233a-479a-9152-082ca02f5cbb" (UID: "af17650f-233a-479a-9152-082ca02f5cbb"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.307292 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1846120d-0192-4626-8b0d-d9653339013e-audit-policies\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.308655 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "af17650f-233a-479a-9152-082ca02f5cbb" (UID: "af17650f-233a-479a-9152-082ca02f5cbb"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.309201 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.309477 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "af17650f-233a-479a-9152-082ca02f5cbb" (UID: "af17650f-233a-479a-9152-082ca02f5cbb"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.310234 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "af17650f-233a-479a-9152-082ca02f5cbb" (UID: "af17650f-233a-479a-9152-082ca02f5cbb"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.310327 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1846120d-0192-4626-8b0d-d9653339013e-audit-dir\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.310482 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.310594 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.310622 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-session\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.310905 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.311101 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "af17650f-233a-479a-9152-082ca02f5cbb" (UID: "af17650f-233a-479a-9152-082ca02f5cbb"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.311231 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.311283 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.311306 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-service-ca\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.311330 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-user-template-login\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.312315 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "af17650f-233a-479a-9152-082ca02f5cbb" (UID: "af17650f-233a-479a-9152-082ca02f5cbb"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.312561 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-user-template-error\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.313067 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/1846120d-0192-4626-8b0d-d9653339013e-v4-0-config-system-router-certs\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.313832 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "af17650f-233a-479a-9152-082ca02f5cbb" (UID: "af17650f-233a-479a-9152-082ca02f5cbb"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.313988 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af17650f-233a-479a-9152-082ca02f5cbb-kube-api-access-fjtl7" (OuterVolumeSpecName: "kube-api-access-fjtl7") pod "af17650f-233a-479a-9152-082ca02f5cbb" (UID: "af17650f-233a-479a-9152-082ca02f5cbb"). InnerVolumeSpecName "kube-api-access-fjtl7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.319222 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kwlm\" (UniqueName: \"kubernetes.io/projected/1846120d-0192-4626-8b0d-d9653339013e-kube-api-access-5kwlm\") pod \"oauth-openshift-d6cd9cb6-md8mn\" (UID: \"1846120d-0192-4626-8b0d-d9653339013e\") " pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.406440 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.406479 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.406490 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.406526 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.406536 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.406545 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjtl7\" (UniqueName: \"kubernetes.io/projected/af17650f-233a-479a-9152-082ca02f5cbb-kube-api-access-fjtl7\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.406555 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.406563 4622 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/af17650f-233a-479a-9152-082ca02f5cbb-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.447766 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.480944 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-b44gm"] Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.483736 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-b44gm"] Nov 26 11:14:11 crc kubenswrapper[4622]: I1126 11:14:11.795519 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-d6cd9cb6-md8mn"] Nov 26 11:14:12 crc kubenswrapper[4622]: I1126 11:14:12.164977 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" event={"ID":"1846120d-0192-4626-8b0d-d9653339013e","Type":"ContainerStarted","Data":"c9ed2d908bea169a7bfcb4f5911331d47c8a268a930a01b5815afa0d6dc132c2"} Nov 26 11:14:12 crc kubenswrapper[4622]: I1126 11:14:12.165247 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" event={"ID":"1846120d-0192-4626-8b0d-d9653339013e","Type":"ContainerStarted","Data":"dfacab618c0535adff6f7b7c0db4498234c9ffc6ee17f93bcbdacdc1990b8b26"} Nov 26 11:14:12 crc kubenswrapper[4622]: I1126 11:14:12.165698 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:12 crc kubenswrapper[4622]: I1126 11:14:12.184685 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" podStartSLOduration=27.184659544 podStartE2EDuration="27.184659544s" podCreationTimestamp="2025-11-26 11:13:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:14:12.182860089 +0000 UTC m=+211.774071611" watchObservedRunningTime="2025-11-26 11:14:12.184659544 +0000 UTC m=+211.775871066" Nov 26 11:14:12 crc kubenswrapper[4622]: I1126 11:14:12.363747 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-d6cd9cb6-md8mn" Nov 26 11:14:12 crc kubenswrapper[4622]: I1126 11:14:12.712710 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af17650f-233a-479a-9152-082ca02f5cbb" path="/var/lib/kubelet/pods/af17650f-233a-479a-9152-082ca02f5cbb/volumes" Nov 26 11:14:15 crc kubenswrapper[4622]: I1126 11:14:15.199634 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:14:15 crc kubenswrapper[4622]: I1126 11:14:15.199697 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:14:15 crc kubenswrapper[4622]: I1126 11:14:15.199743 4622 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:14:15 crc kubenswrapper[4622]: I1126 11:14:15.200153 4622 
Nov 26 11:14:15 crc kubenswrapper[4622]: I1126 11:14:15.200201 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" containerID="cri-o://11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4" gracePeriod=600
Nov 26 11:14:16 crc kubenswrapper[4622]: I1126 11:14:16.189874 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerID="11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4" exitCode=0
Nov 26 11:14:16 crc kubenswrapper[4622]: I1126 11:14:16.189956 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerDied","Data":"11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4"}
Nov 26 11:14:16 crc kubenswrapper[4622]: I1126 11:14:16.190617 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"f348cd6994d5058f0ac71d77992189f80cff2525f87b9aa4c209a37822c4a046"}
Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.440984 4622 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.442429 4622 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.442624 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.443096 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4" gracePeriod=15 Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.443092 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd" gracePeriod=15 Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.443241 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768" gracePeriod=15 Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.443223 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d" gracePeriod=15 Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.443327 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb" gracePeriod=15 Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.443527 4622 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 26 11:14:29 crc kubenswrapper[4622]: E1126 11:14:29.443916 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.443944 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 26 11:14:29 crc kubenswrapper[4622]: E1126 11:14:29.443958 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.443964 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Nov 26 11:14:29 crc kubenswrapper[4622]: E1126 11:14:29.443974 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.443980 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Nov 26 11:14:29 crc kubenswrapper[4622]: E1126 11:14:29.443998 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="setup" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.444006 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Nov 26 11:14:29 crc kubenswrapper[4622]: E1126 11:14:29.444013 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.444019 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Nov 26 11:14:29 crc kubenswrapper[4622]: E1126 11:14:29.444029 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.444036 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.444175 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.444186 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.444196 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.444204 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.444214 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.444220 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Nov 26 11:14:29 crc kubenswrapper[4622]: E1126 11:14:29.444323 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.444329 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.455009 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.455047 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.455066 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.455103 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.455126 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.455143 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.455174 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.455200 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.481762 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.556615 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.556767 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.556822 4622 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.556839 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.556866 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.556922 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.556938 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.556960 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.557052 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.556709 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.557095 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.557116 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.557133 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.557152 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.557170 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.557187 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: I1126 11:14:29.780070 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:14:29 crc kubenswrapper[4622]: W1126 11:14:29.799401 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-affe3e26de28e8777a05e1b7e054b582edb41801ce282b66355e58afa1141cc4 WatchSource:0}: Error finding container affe3e26de28e8777a05e1b7e054b582edb41801ce282b66355e58afa1141cc4: Status 404 returned error can't find the container with id affe3e26de28e8777a05e1b7e054b582edb41801ce282b66355e58afa1141cc4 Nov 26 11:14:29 crc kubenswrapper[4622]: E1126 11:14:29.802754 4622 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.26.254:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187b8a3c1b09ad69 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-26 11:14:29.801971049 +0000 UTC m=+229.393182571,LastTimestamp:2025-11-26 11:14:29.801971049 +0000 UTC m=+229.393182571,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.269515 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.270867 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.271470 4622 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4" exitCode=0 Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.271515 4622 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb" exitCode=0 Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.271524 4622 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768" exitCode=0 Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.271532 4622 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d" exitCode=2 Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.271593 4622 scope.go:117] "RemoveContainer" containerID="9fa0406543f8ed90ee098b3ba2cc90902184794c219194736d40dba9f39c1172" Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 
Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.273658 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"97944a6f-d7bf-4488-a74b-d70e36746f7f","Type":"ContainerDied","Data":"5d67794c7a59e8ee08de61cedf907d6dcf7b652ad323f19edfb88cebe365c232"}
Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.274363 4622 status_manager.go:851] "Failed to get status for pod" podUID="97944a6f-d7bf-4488-a74b-d70e36746f7f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.254:6443: connect: connection refused"
Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.274634 4622 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.254:6443: connect: connection refused"
Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.274933 4622 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.254:6443: connect: connection refused"
Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.276125 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"1984a2fea75478eef5a52b12a7a436daac32be5ba7d40c7f8f3a90995a343a40"}
Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.276164 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"affe3e26de28e8777a05e1b7e054b582edb41801ce282b66355e58afa1141cc4"}
Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.276750 4622 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.254:6443: connect: connection refused"
Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.277060 4622 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.254:6443: connect: connection refused"
Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.277381 4622 status_manager.go:851] "Failed to get status for pod" podUID="97944a6f-d7bf-4488-a74b-d70e36746f7f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.254:6443: connect: connection refused"
connection refused" Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.708147 4622 status_manager.go:851] "Failed to get status for pod" podUID="97944a6f-d7bf-4488-a74b-d70e36746f7f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.708552 4622 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:30 crc kubenswrapper[4622]: I1126 11:14:30.708797 4622 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:30 crc kubenswrapper[4622]: E1126 11:14:30.902888 4622 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.26.254:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187b8a3c1b09ad69 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-26 11:14:29.801971049 +0000 UTC m=+229.393182571,LastTimestamp:2025-11-26 11:14:29.801971049 +0000 UTC m=+229.393182571,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.284170 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.457362 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.457998 4622 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.458229 4622 status_manager.go:851] "Failed to get status for pod" podUID="97944a6f-d7bf-4488-a74b-d70e36746f7f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.481551 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/97944a6f-d7bf-4488-a74b-d70e36746f7f-kube-api-access\") pod \"97944a6f-d7bf-4488-a74b-d70e36746f7f\" (UID: \"97944a6f-d7bf-4488-a74b-d70e36746f7f\") " Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.481588 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/97944a6f-d7bf-4488-a74b-d70e36746f7f-var-lock\") pod \"97944a6f-d7bf-4488-a74b-d70e36746f7f\" (UID: \"97944a6f-d7bf-4488-a74b-d70e36746f7f\") " Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.481627 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/97944a6f-d7bf-4488-a74b-d70e36746f7f-kubelet-dir\") pod \"97944a6f-d7bf-4488-a74b-d70e36746f7f\" (UID: \"97944a6f-d7bf-4488-a74b-d70e36746f7f\") " Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.481681 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/97944a6f-d7bf-4488-a74b-d70e36746f7f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "97944a6f-d7bf-4488-a74b-d70e36746f7f" (UID: "97944a6f-d7bf-4488-a74b-d70e36746f7f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.481680 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/97944a6f-d7bf-4488-a74b-d70e36746f7f-var-lock" (OuterVolumeSpecName: "var-lock") pod "97944a6f-d7bf-4488-a74b-d70e36746f7f" (UID: "97944a6f-d7bf-4488-a74b-d70e36746f7f"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.482263 4622 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/97944a6f-d7bf-4488-a74b-d70e36746f7f-var-lock\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.482308 4622 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/97944a6f-d7bf-4488-a74b-d70e36746f7f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.513269 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97944a6f-d7bf-4488-a74b-d70e36746f7f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "97944a6f-d7bf-4488-a74b-d70e36746f7f" (UID: "97944a6f-d7bf-4488-a74b-d70e36746f7f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.583767 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/97944a6f-d7bf-4488-a74b-d70e36746f7f-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.723830 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.724781 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.725464 4622 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.725800 4622 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.726250 4622 status_manager.go:851] "Failed to get status for pod" podUID="97944a6f-d7bf-4488-a74b-d70e36746f7f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.786228 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.786329 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.786372 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.786381 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.786458 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.786595 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.786721 4622 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.786744 4622 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:31 crc kubenswrapper[4622]: I1126 11:14:31.786766 4622 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.294170 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.294177 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"97944a6f-d7bf-4488-a74b-d70e36746f7f","Type":"ContainerDied","Data":"15269ab327a266b852b7d2e8bb123af0f99f86422624d8bdd4ca4c93c71bed65"} Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.294481 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15269ab327a266b852b7d2e8bb123af0f99f86422624d8bdd4ca4c93c71bed65" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.298096 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.298977 4622 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd" exitCode=0 Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.299043 4622 scope.go:117] "RemoveContainer" containerID="41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.299080 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.307625 4622 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.307836 4622 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.308022 4622 status_manager.go:851] "Failed to get status for pod" podUID="97944a6f-d7bf-4488-a74b-d70e36746f7f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.313345 4622 status_manager.go:851] "Failed to get status for pod" podUID="97944a6f-d7bf-4488-a74b-d70e36746f7f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.313662 4622 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.314091 4622 status_manager.go:851] "Failed to get status for pod" 
podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.317477 4622 scope.go:117] "RemoveContainer" containerID="78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.335797 4622 scope.go:117] "RemoveContainer" containerID="831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.347999 4622 scope.go:117] "RemoveContainer" containerID="b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.371098 4622 scope.go:117] "RemoveContainer" containerID="039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.382302 4622 scope.go:117] "RemoveContainer" containerID="02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.398210 4622 scope.go:117] "RemoveContainer" containerID="41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4" Nov 26 11:14:32 crc kubenswrapper[4622]: E1126 11:14:32.399838 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\": container with ID starting with 41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4 not found: ID does not exist" containerID="41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.399880 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4"} err="failed to get container status \"41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\": rpc error: code = NotFound desc = could not find container \"41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4\": container with ID starting with 41bbb58fe1e2884327e4b3cd9c79692e46ac600e3cf7617056d9366266f9c2d4 not found: ID does not exist" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.399907 4622 scope.go:117] "RemoveContainer" containerID="78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb" Nov 26 11:14:32 crc kubenswrapper[4622]: E1126 11:14:32.400201 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\": container with ID starting with 78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb not found: ID does not exist" containerID="78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.400237 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb"} err="failed to get container status \"78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\": rpc error: code = NotFound desc = could not find container \"78ed066a9b70d6dd7ca62984f6ea93d3897b045c7aae0d5d35bbfc53fd8118bb\": container with 
Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.400260 4622 scope.go:117] "RemoveContainer" containerID="831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768"
Nov 26 11:14:32 crc kubenswrapper[4622]: E1126 11:14:32.401041 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\": container with ID starting with 831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768 not found: ID does not exist" containerID="831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768"
Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.401065 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768"} err="failed to get container status \"831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\": rpc error: code = NotFound desc = could not find container \"831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768\": container with ID starting with 831f4093d88c10aea3b8d6a588ecdaf4242d642c8ce13fab1dca5723000fa768 not found: ID does not exist"
Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.401079 4622 scope.go:117] "RemoveContainer" containerID="b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d"
Nov 26 11:14:32 crc kubenswrapper[4622]: E1126 11:14:32.401469 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\": container with ID starting with b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d not found: ID does not exist" containerID="b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d"
Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.401545 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d"} err="failed to get container status \"b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\": rpc error: code = NotFound desc = could not find container \"b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d\": container with ID starting with b01c9a8122730f1b5d2361872631478f8b440afe08e3089a34fd5b2830b5d88d not found: ID does not exist"
Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.401579 4622 scope.go:117] "RemoveContainer" containerID="039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd"
Nov 26 11:14:32 crc kubenswrapper[4622]: E1126 11:14:32.402005 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\": container with ID starting with 039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd not found: ID does not exist" containerID="039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd"
Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.402030 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd"} err="failed to get container status \"039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\": rpc error: code = NotFound desc = could not find container \"039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\": container with ID starting with 039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd not found: ID does not exist"
\"039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\": rpc error: code = NotFound desc = could not find container \"039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd\": container with ID starting with 039d50ff0a2accc50fd89b2960633f57e8d58233306c4113741ffffaa3f9abdd not found: ID does not exist" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.402044 4622 scope.go:117] "RemoveContainer" containerID="02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f" Nov 26 11:14:32 crc kubenswrapper[4622]: E1126 11:14:32.402312 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\": container with ID starting with 02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f not found: ID does not exist" containerID="02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.402332 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f"} err="failed to get container status \"02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\": rpc error: code = NotFound desc = could not find container \"02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f\": container with ID starting with 02e44be599006641d1852fc36083e067fa7281a4f9e8ec7d8e41136fea6d606f not found: ID does not exist" Nov 26 11:14:32 crc kubenswrapper[4622]: I1126 11:14:32.712938 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Nov 26 11:14:35 crc kubenswrapper[4622]: E1126 11:14:35.736281 4622 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 192.168.26.254:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" volumeName="registry-storage" Nov 26 11:14:36 crc kubenswrapper[4622]: E1126 11:14:36.117571 4622 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:36 crc kubenswrapper[4622]: E1126 11:14:36.117941 4622 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:36 crc kubenswrapper[4622]: E1126 11:14:36.118338 4622 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:36 crc kubenswrapper[4622]: E1126 11:14:36.118565 4622 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:36 crc 
kubenswrapper[4622]: E1126 11:14:36.118812 4622 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:36 crc kubenswrapper[4622]: I1126 11:14:36.118843 4622 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Nov 26 11:14:36 crc kubenswrapper[4622]: E1126 11:14:36.119072 4622 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.254:6443: connect: connection refused" interval="200ms" Nov 26 11:14:36 crc kubenswrapper[4622]: E1126 11:14:36.320471 4622 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.254:6443: connect: connection refused" interval="400ms" Nov 26 11:14:36 crc kubenswrapper[4622]: E1126 11:14:36.721319 4622 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.254:6443: connect: connection refused" interval="800ms" Nov 26 11:14:37 crc kubenswrapper[4622]: E1126 11:14:37.523073 4622 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.254:6443: connect: connection refused" interval="1.6s" Nov 26 11:14:39 crc kubenswrapper[4622]: E1126 11:14:39.124403 4622 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.254:6443: connect: connection refused" interval="3.2s" Nov 26 11:14:40 crc kubenswrapper[4622]: I1126 11:14:40.707842 4622 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:40 crc kubenswrapper[4622]: I1126 11:14:40.708388 4622 status_manager.go:851] "Failed to get status for pod" podUID="97944a6f-d7bf-4488-a74b-d70e36746f7f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:40 crc kubenswrapper[4622]: E1126 11:14:40.903375 4622 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.26.254:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187b8a3c1b09ad69 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-26 11:14:29.801971049 +0000 UTC m=+229.393182571,LastTimestamp:2025-11-26 11:14:29.801971049 +0000 UTC m=+229.393182571,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 26 11:14:41 crc kubenswrapper[4622]: I1126 11:14:41.706023 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:41 crc kubenswrapper[4622]: I1126 11:14:41.706767 4622 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:41 crc kubenswrapper[4622]: I1126 11:14:41.707130 4622 status_manager.go:851] "Failed to get status for pod" podUID="97944a6f-d7bf-4488-a74b-d70e36746f7f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:41 crc kubenswrapper[4622]: I1126 11:14:41.720041 4622 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="862cb43b-4d06-4de6-b69b-3df1965f8ea2" Nov 26 11:14:41 crc kubenswrapper[4622]: I1126 11:14:41.720068 4622 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="862cb43b-4d06-4de6-b69b-3df1965f8ea2" Nov 26 11:14:41 crc kubenswrapper[4622]: E1126 11:14:41.720648 4622 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:41 crc kubenswrapper[4622]: I1126 11:14:41.721578 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:41 crc kubenswrapper[4622]: W1126 11:14:41.739201 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-813e02092d4554b554d9dfb0475c0feb9f68daec1ffb80c5821262dc1e7b6441 WatchSource:0}: Error finding container 813e02092d4554b554d9dfb0475c0feb9f68daec1ffb80c5821262dc1e7b6441: Status 404 returned error can't find the container with id 813e02092d4554b554d9dfb0475c0feb9f68daec1ffb80c5821262dc1e7b6441 Nov 26 11:14:42 crc kubenswrapper[4622]: E1126 11:14:42.326150 4622 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.254:6443: connect: connection refused" interval="6.4s" Nov 26 11:14:42 crc kubenswrapper[4622]: I1126 11:14:42.362109 4622 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="e363881e9f24076730ddb8091e3b32713fac73c65e167bc034c529931bfc60af" exitCode=0 Nov 26 11:14:42 crc kubenswrapper[4622]: I1126 11:14:42.362167 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"e363881e9f24076730ddb8091e3b32713fac73c65e167bc034c529931bfc60af"} Nov 26 11:14:42 crc kubenswrapper[4622]: I1126 11:14:42.362211 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"813e02092d4554b554d9dfb0475c0feb9f68daec1ffb80c5821262dc1e7b6441"} Nov 26 11:14:42 crc kubenswrapper[4622]: I1126 11:14:42.362495 4622 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="862cb43b-4d06-4de6-b69b-3df1965f8ea2" Nov 26 11:14:42 crc kubenswrapper[4622]: I1126 11:14:42.362538 4622 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="862cb43b-4d06-4de6-b69b-3df1965f8ea2" Nov 26 11:14:42 crc kubenswrapper[4622]: I1126 11:14:42.362948 4622 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:42 crc kubenswrapper[4622]: E1126 11:14:42.362968 4622 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:42 crc kubenswrapper[4622]: I1126 11:14:42.363387 4622 status_manager.go:851] "Failed to get status for pod" podUID="97944a6f-d7bf-4488-a74b-d70e36746f7f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.254:6443: connect: connection refused" Nov 26 11:14:43 crc kubenswrapper[4622]: I1126 11:14:43.371113 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"aa9a765eb9f8c3caefc9237e93cda395406563fb8253adc65c0d1ad5ac5ff27c"} Nov 26 11:14:43 crc kubenswrapper[4622]: I1126 11:14:43.371466 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"24031c884713b2b3b0f3acaec4488a5e4ac35c485fad0de8a5135db579ad9e06"} Nov 26 11:14:43 crc kubenswrapper[4622]: I1126 11:14:43.371480 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"72445c34e7c3889f698274ce2da217e43ea2be167aa9c047f248402926f0dc15"} Nov 26 11:14:43 crc kubenswrapper[4622]: I1126 11:14:43.371490 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7a7b53366c089c73ca451875dfe1756a2f742a9e543ab98344dc376723084a2f"} Nov 26 11:14:43 crc kubenswrapper[4622]: I1126 11:14:43.371517 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"1897420a0315bac1414f3f475cd29190a2d451cf8dd35d59c875dce6b4163fdb"} Nov 26 11:14:43 crc kubenswrapper[4622]: I1126 11:14:43.371786 4622 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="862cb43b-4d06-4de6-b69b-3df1965f8ea2" Nov 26 11:14:43 crc kubenswrapper[4622]: I1126 11:14:43.371802 4622 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="862cb43b-4d06-4de6-b69b-3df1965f8ea2" Nov 26 11:14:43 crc kubenswrapper[4622]: I1126 11:14:43.372274 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:44 crc kubenswrapper[4622]: I1126 11:14:44.379143 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Nov 26 11:14:44 crc kubenswrapper[4622]: I1126 11:14:44.379205 4622 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30" exitCode=1 Nov 26 11:14:44 crc kubenswrapper[4622]: I1126 11:14:44.379247 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30"} Nov 26 11:14:44 crc kubenswrapper[4622]: I1126 11:14:44.379870 4622 scope.go:117] "RemoveContainer" containerID="8121c0d3b9fc75fc3fc09a7b51e764ed788657b2e0bd6efd86b6e5231e7c2c30" Nov 26 11:14:45 crc kubenswrapper[4622]: I1126 11:14:45.389127 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Nov 26 11:14:45 crc kubenswrapper[4622]: I1126 11:14:45.389542 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d470bfd682f29ba48a4bac5d60f3a5b48fb831ab637c233fc1460bd709309ffc"} Nov 26 11:14:45 crc kubenswrapper[4622]: I1126 11:14:45.764025 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:14:45 crc kubenswrapper[4622]: I1126 11:14:45.768223 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:14:46 crc kubenswrapper[4622]: I1126 11:14:46.407028 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:14:46 crc kubenswrapper[4622]: I1126 11:14:46.725442 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:46 crc kubenswrapper[4622]: I1126 11:14:46.725519 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:46 crc kubenswrapper[4622]: I1126 11:14:46.729151 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:48 crc kubenswrapper[4622]: I1126 11:14:48.868049 4622 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:49 crc kubenswrapper[4622]: I1126 11:14:49.420129 4622 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="862cb43b-4d06-4de6-b69b-3df1965f8ea2" Nov 26 11:14:49 crc kubenswrapper[4622]: I1126 11:14:49.420157 4622 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="862cb43b-4d06-4de6-b69b-3df1965f8ea2" Nov 26 11:14:49 crc kubenswrapper[4622]: I1126 11:14:49.423625 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:14:50 crc kubenswrapper[4622]: I1126 11:14:50.424160 4622 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="862cb43b-4d06-4de6-b69b-3df1965f8ea2" Nov 26 11:14:50 crc kubenswrapper[4622]: I1126 11:14:50.424533 4622 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="862cb43b-4d06-4de6-b69b-3df1965f8ea2" Nov 26 11:14:50 crc kubenswrapper[4622]: I1126 11:14:50.714768 4622 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="84b1ae3b-3e43-4121-a677-1757ef2d6cf9" Nov 26 11:14:58 crc kubenswrapper[4622]: I1126 11:14:58.440864 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 26 11:14:59 crc kubenswrapper[4622]: I1126 11:14:59.480197 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Nov 26 11:14:59 crc kubenswrapper[4622]: I1126 11:14:59.579399 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Nov 26 11:15:00 crc kubenswrapper[4622]: I1126 11:15:00.388784 4622 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"authentication-operator-config" Nov 26 11:15:01 crc kubenswrapper[4622]: I1126 11:15:01.036827 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Nov 26 11:15:01 crc kubenswrapper[4622]: I1126 11:15:01.397865 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Nov 26 11:15:01 crc kubenswrapper[4622]: I1126 11:15:01.438741 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Nov 26 11:15:01 crc kubenswrapper[4622]: I1126 11:15:01.475955 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Nov 26 11:15:01 crc kubenswrapper[4622]: I1126 11:15:01.513176 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Nov 26 11:15:01 crc kubenswrapper[4622]: I1126 11:15:01.562927 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Nov 26 11:15:01 crc kubenswrapper[4622]: I1126 11:15:01.591049 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Nov 26 11:15:01 crc kubenswrapper[4622]: I1126 11:15:01.845370 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Nov 26 11:15:01 crc kubenswrapper[4622]: I1126 11:15:01.851636 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Nov 26 11:15:02 crc kubenswrapper[4622]: I1126 11:15:02.026486 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Nov 26 11:15:02 crc kubenswrapper[4622]: I1126 11:15:02.115237 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Nov 26 11:15:02 crc kubenswrapper[4622]: I1126 11:15:02.156855 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Nov 26 11:15:02 crc kubenswrapper[4622]: I1126 11:15:02.248425 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Nov 26 11:15:02 crc kubenswrapper[4622]: I1126 11:15:02.297174 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Nov 26 11:15:02 crc kubenswrapper[4622]: I1126 11:15:02.677592 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Nov 26 11:15:02 crc kubenswrapper[4622]: I1126 11:15:02.690439 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Nov 26 11:15:02 crc kubenswrapper[4622]: I1126 11:15:02.768938 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 26 11:15:02 crc kubenswrapper[4622]: I1126 11:15:02.947598 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Nov 26 11:15:02 crc kubenswrapper[4622]: I1126 11:15:02.977721 4622 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Nov 26 11:15:02 crc kubenswrapper[4622]: I1126 11:15:02.987735 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.020892 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.092131 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.256141 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.276360 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.509529 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.516008 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.567708 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.615652 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.808907 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.827655 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.840417 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.930138 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.953191 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.961884 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.993262 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Nov 26 11:15:03 crc kubenswrapper[4622]: I1126 11:15:03.994933 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Nov 26 11:15:04 crc kubenswrapper[4622]: I1126 11:15:04.036398 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Nov 26 
11:15:04 crc kubenswrapper[4622]: I1126 11:15:04.134979 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Nov 26 11:15:04 crc kubenswrapper[4622]: I1126 11:15:04.151847 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Nov 26 11:15:04 crc kubenswrapper[4622]: I1126 11:15:04.199305 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Nov 26 11:15:04 crc kubenswrapper[4622]: I1126 11:15:04.268948 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Nov 26 11:15:04 crc kubenswrapper[4622]: I1126 11:15:04.274616 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Nov 26 11:15:04 crc kubenswrapper[4622]: I1126 11:15:04.399926 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Nov 26 11:15:04 crc kubenswrapper[4622]: I1126 11:15:04.676170 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Nov 26 11:15:04 crc kubenswrapper[4622]: I1126 11:15:04.711908 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Nov 26 11:15:04 crc kubenswrapper[4622]: I1126 11:15:04.753168 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Nov 26 11:15:04 crc kubenswrapper[4622]: I1126 11:15:04.788476 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Nov 26 11:15:04 crc kubenswrapper[4622]: I1126 11:15:04.810294 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Nov 26 11:15:04 crc kubenswrapper[4622]: I1126 11:15:04.835917 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Nov 26 11:15:04 crc kubenswrapper[4622]: I1126 11:15:04.910140 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Nov 26 11:15:04 crc kubenswrapper[4622]: I1126 11:15:04.971461 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.022441 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.079352 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.102886 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.200732 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.234786 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.243221 
4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.304638 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.367728 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.371067 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.682089 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.761726 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.774070 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.787042 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.881134 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.954943 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Nov 26 11:15:05 crc kubenswrapper[4622]: I1126 11:15:05.983856 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Nov 26 11:15:06 crc kubenswrapper[4622]: I1126 11:15:06.057616 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Nov 26 11:15:06 crc kubenswrapper[4622]: I1126 11:15:06.296251 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Nov 26 11:15:06 crc kubenswrapper[4622]: I1126 11:15:06.307789 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Nov 26 11:15:06 crc kubenswrapper[4622]: I1126 11:15:06.346727 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Nov 26 11:15:06 crc kubenswrapper[4622]: I1126 11:15:06.470794 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Nov 26 11:15:06 crc kubenswrapper[4622]: I1126 11:15:06.606574 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Nov 26 11:15:06 crc kubenswrapper[4622]: I1126 11:15:06.606646 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Nov 26 11:15:06 crc kubenswrapper[4622]: I1126 11:15:06.647146 4622 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-multus"/"default-dockercfg-2q5b6" Nov 26 11:15:06 crc kubenswrapper[4622]: I1126 11:15:06.700593 4622 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Nov 26 11:15:06 crc kubenswrapper[4622]: I1126 11:15:06.750719 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Nov 26 11:15:06 crc kubenswrapper[4622]: I1126 11:15:06.766132 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Nov 26 11:15:06 crc kubenswrapper[4622]: I1126 11:15:06.784061 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Nov 26 11:15:06 crc kubenswrapper[4622]: I1126 11:15:06.789205 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Nov 26 11:15:06 crc kubenswrapper[4622]: I1126 11:15:06.796656 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Nov 26 11:15:06 crc kubenswrapper[4622]: I1126 11:15:06.829859 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.134790 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.135778 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.263440 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.291004 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.358191 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.398969 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.452961 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.453766 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.603805 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.616209 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.684434 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.689458 4622 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.809763 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.825407 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.900175 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.910828 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Nov 26 11:15:07 crc kubenswrapper[4622]: I1126 11:15:07.956991 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.055937 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.114744 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.206821 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.242536 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.244652 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.248888 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.408918 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.430397 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.512404 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.617685 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.638443 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.650921 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.713549 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.727722 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Nov 26 11:15:08 crc 
kubenswrapper[4622]: I1126 11:15:08.768516 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.837474 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Nov 26 11:15:08 crc kubenswrapper[4622]: I1126 11:15:08.924156 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.008235 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.049946 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.143271 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.162713 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.180148 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.254309 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.278988 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.299850 4622 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.349073 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.354635 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.359817 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.374612 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.393252 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.416361 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.417521 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.599458 4622 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.633240 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.673742 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Nov 26 11:15:09 crc kubenswrapper[4622]: I1126 11:15:09.752048 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.053963 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.117129 4622 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.117620 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=41.117598882 podStartE2EDuration="41.117598882s" podCreationTimestamp="2025-11-26 11:14:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:14:48.915438183 +0000 UTC m=+248.506649705" watchObservedRunningTime="2025-11-26 11:15:10.117598882 +0000 UTC m=+269.708810405" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.123541 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.123596 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.127455 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.144918 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=22.144897961 podStartE2EDuration="22.144897961s" podCreationTimestamp="2025-11-26 11:14:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:15:10.142631143 +0000 UTC m=+269.733842666" watchObservedRunningTime="2025-11-26 11:15:10.144897961 +0000 UTC m=+269.736109513" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.168195 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.252025 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.285479 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7"] Nov 26 11:15:10 crc kubenswrapper[4622]: E1126 11:15:10.285712 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97944a6f-d7bf-4488-a74b-d70e36746f7f" containerName="installer" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.285725 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="97944a6f-d7bf-4488-a74b-d70e36746f7f" 
containerName="installer" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.285814 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="97944a6f-d7bf-4488-a74b-d70e36746f7f" containerName="installer" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.286163 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.290221 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.290567 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.325562 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-secret-volume\") pod \"collect-profiles-29402595-48vr7\" (UID: \"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.325634 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-config-volume\") pod \"collect-profiles-29402595-48vr7\" (UID: \"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.325782 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7tkj\" (UniqueName: \"kubernetes.io/projected/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-kube-api-access-q7tkj\") pod \"collect-profiles-29402595-48vr7\" (UID: \"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.339380 4622 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.339647 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://1984a2fea75478eef5a52b12a7a436daac32be5ba7d40c7f8f3a90995a343a40" gracePeriod=5 Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.360929 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.390722 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.426730 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7tkj\" (UniqueName: \"kubernetes.io/projected/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-kube-api-access-q7tkj\") pod \"collect-profiles-29402595-48vr7\" (UID: \"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" Nov 
26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.426834 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-secret-volume\") pod \"collect-profiles-29402595-48vr7\" (UID: \"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.426884 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-config-volume\") pod \"collect-profiles-29402595-48vr7\" (UID: \"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.427991 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-config-volume\") pod \"collect-profiles-29402595-48vr7\" (UID: \"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.441447 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-secret-volume\") pod \"collect-profiles-29402595-48vr7\" (UID: \"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.444705 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7tkj\" (UniqueName: \"kubernetes.io/projected/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-kube-api-access-q7tkj\") pod \"collect-profiles-29402595-48vr7\" (UID: \"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.456043 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.544890 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.601675 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.662957 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.721408 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.723779 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.752727 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.862556 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.894979 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 26 11:15:10 crc kubenswrapper[4622]: I1126 11:15:10.947860 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.008701 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.127036 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.147843 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.166075 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.201176 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.203065 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.292925 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.297150 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.298425 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.366460 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.373737 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.445421 4622 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.485165 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.544110 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.572280 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.649091 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.692533 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.795605 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.861125 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.889674 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Nov 26 11:15:11 crc kubenswrapper[4622]: I1126 11:15:11.904909 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.022824 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l97bl"] Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.023048 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-l97bl" podUID="b71a6300-1cfd-4eb0-b75c-7231184cfe79" containerName="registry-server" containerID="cri-o://992ab2b07f34c38206374fdd49c1015600a4a7f3bf2e603321ba3a3abc3625c0" gracePeriod=30 Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.041533 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d4zmf"] Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.041811 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-d4zmf" podUID="acccae42-8133-475f-ad53-dbfa434e5e45" containerName="registry-server" containerID="cri-o://9689bbc1d9d225e5b3c21af0c6e5d85aa44f1b93604ce3eb920df99c61f74b0d" gracePeriod=30 Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.044612 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.048170 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vgnvk"] Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.048443 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" podUID="8bd37ceb-bccf-4efc-914e-0fd8ad2bc992" containerName="marketplace-operator" 
containerID="cri-o://49c3c118ef9985260f58b7758f6bff035f64c948cb2fde2e91402dbae1e00763" gracePeriod=30 Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.050294 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5bd7z"] Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.050601 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5bd7z" podUID="a6824d35-eddb-4fb5-821d-0a82184fbe45" containerName="registry-server" containerID="cri-o://e3f5157c1c1157088bb0a65fc04f870cf23b426711762aa13245686fb48564fd" gracePeriod=30 Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.058015 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wnxq7"] Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.058219 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wnxq7" podUID="31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6" containerName="registry-server" containerID="cri-o://89558b88f96ba4cc93a4c564da5d2e39101444e9b65d2acfc51418ffe59cafa8" gracePeriod=30 Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.076407 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-f7zqw"] Nov 26 11:15:12 crc kubenswrapper[4622]: E1126 11:15:12.076701 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.076721 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.076845 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.077251 4622 util.go:30] "No sandbox for pod can be found. 
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.106015 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.158184 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.209096 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.210109 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.250018 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f2df0e8a-78e0-4d5a-8d31-e7744df07db7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-f7zqw\" (UID: \"f2df0e8a-78e0-4d5a-8d31-e7744df07db7\") " pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.250147 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl8rm\" (UniqueName: \"kubernetes.io/projected/f2df0e8a-78e0-4d5a-8d31-e7744df07db7-kube-api-access-hl8rm\") pod \"marketplace-operator-79b997595-f7zqw\" (UID: \"f2df0e8a-78e0-4d5a-8d31-e7744df07db7\") " pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.250211 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f2df0e8a-78e0-4d5a-8d31-e7744df07db7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-f7zqw\" (UID: \"f2df0e8a-78e0-4d5a-8d31-e7744df07db7\") " pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.283601 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.352151 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f2df0e8a-78e0-4d5a-8d31-e7744df07db7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-f7zqw\" (UID: \"f2df0e8a-78e0-4d5a-8d31-e7744df07db7\") " pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.352274 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl8rm\" (UniqueName: \"kubernetes.io/projected/f2df0e8a-78e0-4d5a-8d31-e7744df07db7-kube-api-access-hl8rm\") pod \"marketplace-operator-79b997595-f7zqw\" (UID: \"f2df0e8a-78e0-4d5a-8d31-e7744df07db7\") " pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.352346 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f2df0e8a-78e0-4d5a-8d31-e7744df07db7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-f7zqw\" (UID: \"f2df0e8a-78e0-4d5a-8d31-e7744df07db7\") " pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw"
\"marketplace-operator-79b997595-f7zqw\" (UID: \"f2df0e8a-78e0-4d5a-8d31-e7744df07db7\") " pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.353283 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f2df0e8a-78e0-4d5a-8d31-e7744df07db7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-f7zqw\" (UID: \"f2df0e8a-78e0-4d5a-8d31-e7744df07db7\") " pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.359270 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f2df0e8a-78e0-4d5a-8d31-e7744df07db7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-f7zqw\" (UID: \"f2df0e8a-78e0-4d5a-8d31-e7744df07db7\") " pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.369741 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl8rm\" (UniqueName: \"kubernetes.io/projected/f2df0e8a-78e0-4d5a-8d31-e7744df07db7-kube-api-access-hl8rm\") pod \"marketplace-operator-79b997595-f7zqw\" (UID: \"f2df0e8a-78e0-4d5a-8d31-e7744df07db7\") " pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.454283 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.462872 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.463481 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l97bl" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.463889 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5bd7z" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.496777 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.531304 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d4zmf" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.542839 4622 util.go:48] "No ready sandbox for pod can be found. 
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.544595 4622 generic.go:334] "Generic (PLEG): container finished" podID="a6824d35-eddb-4fb5-821d-0a82184fbe45" containerID="e3f5157c1c1157088bb0a65fc04f870cf23b426711762aa13245686fb48564fd" exitCode=0
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.544686 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5bd7z" event={"ID":"a6824d35-eddb-4fb5-821d-0a82184fbe45","Type":"ContainerDied","Data":"e3f5157c1c1157088bb0a65fc04f870cf23b426711762aa13245686fb48564fd"}
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.544709 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5bd7z" event={"ID":"a6824d35-eddb-4fb5-821d-0a82184fbe45","Type":"ContainerDied","Data":"47f41e615c9343ff142551232b62db8214248bc4bae712d386fa8100b18e4786"}
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.544764 4622 scope.go:117] "RemoveContainer" containerID="e3f5157c1c1157088bb0a65fc04f870cf23b426711762aa13245686fb48564fd"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.544872 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5bd7z"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.557676 4622 generic.go:334] "Generic (PLEG): container finished" podID="b71a6300-1cfd-4eb0-b75c-7231184cfe79" containerID="992ab2b07f34c38206374fdd49c1015600a4a7f3bf2e603321ba3a3abc3625c0" exitCode=0
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.557725 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l97bl" event={"ID":"b71a6300-1cfd-4eb0-b75c-7231184cfe79","Type":"ContainerDied","Data":"992ab2b07f34c38206374fdd49c1015600a4a7f3bf2e603321ba3a3abc3625c0"}
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.557747 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l97bl" event={"ID":"b71a6300-1cfd-4eb0-b75c-7231184cfe79","Type":"ContainerDied","Data":"aee024512351b8be977aab42420a79e3ef0b5d6c770e0525d943302d4645e59f"}
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.557804 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l97bl"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.560309 4622 generic.go:334] "Generic (PLEG): container finished" podID="31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6" containerID="89558b88f96ba4cc93a4c564da5d2e39101444e9b65d2acfc51418ffe59cafa8" exitCode=0
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.560359 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wnxq7" event={"ID":"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6","Type":"ContainerDied","Data":"89558b88f96ba4cc93a4c564da5d2e39101444e9b65d2acfc51418ffe59cafa8"}
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.560375 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wnxq7" event={"ID":"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6","Type":"ContainerDied","Data":"e3fc48db502f84104bd11d797f351eb723d4c3018ab1c77e205177ecac01d63d"}
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.560417 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wnxq7"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.563384 4622 generic.go:334] "Generic (PLEG): container finished" podID="8bd37ceb-bccf-4efc-914e-0fd8ad2bc992" containerID="49c3c118ef9985260f58b7758f6bff035f64c948cb2fde2e91402dbae1e00763" exitCode=0
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.563433 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" event={"ID":"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992","Type":"ContainerDied","Data":"49c3c118ef9985260f58b7758f6bff035f64c948cb2fde2e91402dbae1e00763"}
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.563451 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk" event={"ID":"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992","Type":"ContainerDied","Data":"66decb7e0c3f3e3897de18c4306eab60a0293e9779d563871ca31eac6737d73f"}
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.563483 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vgnvk"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.564105 4622 scope.go:117] "RemoveContainer" containerID="91c7fd08aa43f0dc9ee814da58e4dd2b3d51df3930777a466580467dd0a8ecd5"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.575464 4622 generic.go:334] "Generic (PLEG): container finished" podID="acccae42-8133-475f-ad53-dbfa434e5e45" containerID="9689bbc1d9d225e5b3c21af0c6e5d85aa44f1b93604ce3eb920df99c61f74b0d" exitCode=0
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.575532 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d4zmf" event={"ID":"acccae42-8133-475f-ad53-dbfa434e5e45","Type":"ContainerDied","Data":"9689bbc1d9d225e5b3c21af0c6e5d85aa44f1b93604ce3eb920df99c61f74b0d"}
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.575560 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d4zmf" event={"ID":"acccae42-8133-475f-ad53-dbfa434e5e45","Type":"ContainerDied","Data":"1f3cb3394fa16af51bbaefdabe25f42a7d2d3648983a1231f428305d92af1db4"}
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.575578 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d4zmf"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.585797 4622 scope.go:117] "RemoveContainer" containerID="02bdb6f3f6a05f7a21090ef0cbc2ea4f9e0b1ef5787b6ed7779ffa0f4d12c16b"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.600166 4622 scope.go:117] "RemoveContainer" containerID="e3f5157c1c1157088bb0a65fc04f870cf23b426711762aa13245686fb48564fd"
Nov 26 11:15:12 crc kubenswrapper[4622]: E1126 11:15:12.600608 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3f5157c1c1157088bb0a65fc04f870cf23b426711762aa13245686fb48564fd\": container with ID starting with e3f5157c1c1157088bb0a65fc04f870cf23b426711762aa13245686fb48564fd not found: ID does not exist" containerID="e3f5157c1c1157088bb0a65fc04f870cf23b426711762aa13245686fb48564fd"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.600649 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3f5157c1c1157088bb0a65fc04f870cf23b426711762aa13245686fb48564fd"} err="failed to get container status \"e3f5157c1c1157088bb0a65fc04f870cf23b426711762aa13245686fb48564fd\": rpc error: code = NotFound desc = could not find container \"e3f5157c1c1157088bb0a65fc04f870cf23b426711762aa13245686fb48564fd\": container with ID starting with e3f5157c1c1157088bb0a65fc04f870cf23b426711762aa13245686fb48564fd not found: ID does not exist"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.600678 4622 scope.go:117] "RemoveContainer" containerID="91c7fd08aa43f0dc9ee814da58e4dd2b3d51df3930777a466580467dd0a8ecd5"
Nov 26 11:15:12 crc kubenswrapper[4622]: E1126 11:15:12.601085 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91c7fd08aa43f0dc9ee814da58e4dd2b3d51df3930777a466580467dd0a8ecd5\": container with ID starting with 91c7fd08aa43f0dc9ee814da58e4dd2b3d51df3930777a466580467dd0a8ecd5 not found: ID does not exist" containerID="91c7fd08aa43f0dc9ee814da58e4dd2b3d51df3930777a466580467dd0a8ecd5"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.601119 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91c7fd08aa43f0dc9ee814da58e4dd2b3d51df3930777a466580467dd0a8ecd5"} err="failed to get container status \"91c7fd08aa43f0dc9ee814da58e4dd2b3d51df3930777a466580467dd0a8ecd5\": rpc error: code = NotFound desc = could not find container \"91c7fd08aa43f0dc9ee814da58e4dd2b3d51df3930777a466580467dd0a8ecd5\": container with ID starting with 91c7fd08aa43f0dc9ee814da58e4dd2b3d51df3930777a466580467dd0a8ecd5 not found: ID does not exist"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.601141 4622 scope.go:117] "RemoveContainer" containerID="02bdb6f3f6a05f7a21090ef0cbc2ea4f9e0b1ef5787b6ed7779ffa0f4d12c16b"
Nov 26 11:15:12 crc kubenswrapper[4622]: E1126 11:15:12.601381 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02bdb6f3f6a05f7a21090ef0cbc2ea4f9e0b1ef5787b6ed7779ffa0f4d12c16b\": container with ID starting with 02bdb6f3f6a05f7a21090ef0cbc2ea4f9e0b1ef5787b6ed7779ffa0f4d12c16b not found: ID does not exist" containerID="02bdb6f3f6a05f7a21090ef0cbc2ea4f9e0b1ef5787b6ed7779ffa0f4d12c16b"
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.601403 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02bdb6f3f6a05f7a21090ef0cbc2ea4f9e0b1ef5787b6ed7779ffa0f4d12c16b"} err="failed to get container status \"02bdb6f3f6a05f7a21090ef0cbc2ea4f9e0b1ef5787b6ed7779ffa0f4d12c16b\": rpc error: code = NotFound desc = could not find container \"02bdb6f3f6a05f7a21090ef0cbc2ea4f9e0b1ef5787b6ed7779ffa0f4d12c16b\": container with ID starting with 02bdb6f3f6a05f7a21090ef0cbc2ea4f9e0b1ef5787b6ed7779ffa0f4d12c16b not found: ID does not exist"
containerID={"Type":"cri-o","ID":"02bdb6f3f6a05f7a21090ef0cbc2ea4f9e0b1ef5787b6ed7779ffa0f4d12c16b"} err="failed to get container status \"02bdb6f3f6a05f7a21090ef0cbc2ea4f9e0b1ef5787b6ed7779ffa0f4d12c16b\": rpc error: code = NotFound desc = could not find container \"02bdb6f3f6a05f7a21090ef0cbc2ea4f9e0b1ef5787b6ed7779ffa0f4d12c16b\": container with ID starting with 02bdb6f3f6a05f7a21090ef0cbc2ea4f9e0b1ef5787b6ed7779ffa0f4d12c16b not found: ID does not exist" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.601417 4622 scope.go:117] "RemoveContainer" containerID="992ab2b07f34c38206374fdd49c1015600a4a7f3bf2e603321ba3a3abc3625c0" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.612907 4622 scope.go:117] "RemoveContainer" containerID="baf014913e4f40b58a55614cbb91d7afecba07bf77a923eb8d450f8e81c8a44c" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.632997 4622 scope.go:117] "RemoveContainer" containerID="60fa255d3790d33ec2e976f3c8061e3bb579fe4ed151d0ab79963b28bca05abf" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.646563 4622 scope.go:117] "RemoveContainer" containerID="992ab2b07f34c38206374fdd49c1015600a4a7f3bf2e603321ba3a3abc3625c0" Nov 26 11:15:12 crc kubenswrapper[4622]: E1126 11:15:12.646948 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"992ab2b07f34c38206374fdd49c1015600a4a7f3bf2e603321ba3a3abc3625c0\": container with ID starting with 992ab2b07f34c38206374fdd49c1015600a4a7f3bf2e603321ba3a3abc3625c0 not found: ID does not exist" containerID="992ab2b07f34c38206374fdd49c1015600a4a7f3bf2e603321ba3a3abc3625c0" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.647044 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"992ab2b07f34c38206374fdd49c1015600a4a7f3bf2e603321ba3a3abc3625c0"} err="failed to get container status \"992ab2b07f34c38206374fdd49c1015600a4a7f3bf2e603321ba3a3abc3625c0\": rpc error: code = NotFound desc = could not find container \"992ab2b07f34c38206374fdd49c1015600a4a7f3bf2e603321ba3a3abc3625c0\": container with ID starting with 992ab2b07f34c38206374fdd49c1015600a4a7f3bf2e603321ba3a3abc3625c0 not found: ID does not exist" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.647114 4622 scope.go:117] "RemoveContainer" containerID="baf014913e4f40b58a55614cbb91d7afecba07bf77a923eb8d450f8e81c8a44c" Nov 26 11:15:12 crc kubenswrapper[4622]: E1126 11:15:12.647403 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"baf014913e4f40b58a55614cbb91d7afecba07bf77a923eb8d450f8e81c8a44c\": container with ID starting with baf014913e4f40b58a55614cbb91d7afecba07bf77a923eb8d450f8e81c8a44c not found: ID does not exist" containerID="baf014913e4f40b58a55614cbb91d7afecba07bf77a923eb8d450f8e81c8a44c" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.647450 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baf014913e4f40b58a55614cbb91d7afecba07bf77a923eb8d450f8e81c8a44c"} err="failed to get container status \"baf014913e4f40b58a55614cbb91d7afecba07bf77a923eb8d450f8e81c8a44c\": rpc error: code = NotFound desc = could not find container \"baf014913e4f40b58a55614cbb91d7afecba07bf77a923eb8d450f8e81c8a44c\": container with ID starting with baf014913e4f40b58a55614cbb91d7afecba07bf77a923eb8d450f8e81c8a44c not found: ID does not exist" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.647475 4622 
scope.go:117] "RemoveContainer" containerID="60fa255d3790d33ec2e976f3c8061e3bb579fe4ed151d0ab79963b28bca05abf" Nov 26 11:15:12 crc kubenswrapper[4622]: E1126 11:15:12.647718 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60fa255d3790d33ec2e976f3c8061e3bb579fe4ed151d0ab79963b28bca05abf\": container with ID starting with 60fa255d3790d33ec2e976f3c8061e3bb579fe4ed151d0ab79963b28bca05abf not found: ID does not exist" containerID="60fa255d3790d33ec2e976f3c8061e3bb579fe4ed151d0ab79963b28bca05abf" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.647743 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60fa255d3790d33ec2e976f3c8061e3bb579fe4ed151d0ab79963b28bca05abf"} err="failed to get container status \"60fa255d3790d33ec2e976f3c8061e3bb579fe4ed151d0ab79963b28bca05abf\": rpc error: code = NotFound desc = could not find container \"60fa255d3790d33ec2e976f3c8061e3bb579fe4ed151d0ab79963b28bca05abf\": container with ID starting with 60fa255d3790d33ec2e976f3c8061e3bb579fe4ed151d0ab79963b28bca05abf not found: ID does not exist" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.647759 4622 scope.go:117] "RemoveContainer" containerID="89558b88f96ba4cc93a4c564da5d2e39101444e9b65d2acfc51418ffe59cafa8" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.657171 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acccae42-8133-475f-ad53-dbfa434e5e45-catalog-content\") pod \"acccae42-8133-475f-ad53-dbfa434e5e45\" (UID: \"acccae42-8133-475f-ad53-dbfa434e5e45\") " Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.657217 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6824d35-eddb-4fb5-821d-0a82184fbe45-catalog-content\") pod \"a6824d35-eddb-4fb5-821d-0a82184fbe45\" (UID: \"a6824d35-eddb-4fb5-821d-0a82184fbe45\") " Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.657241 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qm9l7\" (UniqueName: \"kubernetes.io/projected/acccae42-8133-475f-ad53-dbfa434e5e45-kube-api-access-qm9l7\") pod \"acccae42-8133-475f-ad53-dbfa434e5e45\" (UID: \"acccae42-8133-475f-ad53-dbfa434e5e45\") " Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.657265 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b71a6300-1cfd-4eb0-b75c-7231184cfe79-catalog-content\") pod \"b71a6300-1cfd-4eb0-b75c-7231184cfe79\" (UID: \"b71a6300-1cfd-4eb0-b75c-7231184cfe79\") " Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.657297 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcjsh\" (UniqueName: \"kubernetes.io/projected/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-kube-api-access-dcjsh\") pod \"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6\" (UID: \"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6\") " Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.657322 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6824d35-eddb-4fb5-821d-0a82184fbe45-utilities\") pod \"a6824d35-eddb-4fb5-821d-0a82184fbe45\" (UID: \"a6824d35-eddb-4fb5-821d-0a82184fbe45\") " Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.657423 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-marketplace-operator-metrics\") pod \"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992\" (UID: \"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992\") "
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.657455 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-catalog-content\") pod \"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6\" (UID: \"31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6\") "
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.657489 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-marketplace-trusted-ca\") pod \"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992\" (UID: \"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992\") "
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.657592 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8snr\" (UniqueName: \"kubernetes.io/projected/a6824d35-eddb-4fb5-821d-0a82184fbe45-kube-api-access-d8snr\") pod \"a6824d35-eddb-4fb5-821d-0a82184fbe45\" (UID: \"a6824d35-eddb-4fb5-821d-0a82184fbe45\") "
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.657654 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b71a6300-1cfd-4eb0-b75c-7231184cfe79-utilities\") pod \"b71a6300-1cfd-4eb0-b75c-7231184cfe79\" (UID: \"b71a6300-1cfd-4eb0-b75c-7231184cfe79\") "
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.657671 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6tpz\" (UniqueName: \"kubernetes.io/projected/b71a6300-1cfd-4eb0-b75c-7231184cfe79-kube-api-access-z6tpz\") pod \"b71a6300-1cfd-4eb0-b75c-7231184cfe79\" (UID: \"b71a6300-1cfd-4eb0-b75c-7231184cfe79\") "
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.657689 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6zfv\" (UniqueName: \"kubernetes.io/projected/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-kube-api-access-c6zfv\") pod \"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992\" (UID: \"8bd37ceb-bccf-4efc-914e-0fd8ad2bc992\") "
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.657714 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acccae42-8133-475f-ad53-dbfa434e5e45-utilities\") pod \"acccae42-8133-475f-ad53-dbfa434e5e45\" (UID: \"acccae42-8133-475f-ad53-dbfa434e5e45\") "
Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.658223 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6824d35-eddb-4fb5-821d-0a82184fbe45-utilities" (OuterVolumeSpecName: "utilities") pod "a6824d35-eddb-4fb5-821d-0a82184fbe45" (UID: "a6824d35-eddb-4fb5-821d-0a82184fbe45"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.658322 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-utilities" (OuterVolumeSpecName: "utilities") pod "31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6" (UID: "31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.659116 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "8bd37ceb-bccf-4efc-914e-0fd8ad2bc992" (UID: "8bd37ceb-bccf-4efc-914e-0fd8ad2bc992"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.659275 4622 scope.go:117] "RemoveContainer" containerID="60e3c39f3007d63a87d8e42c82906969ebe7a5d605fb6f0e2116ab1b6475423c" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.659552 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b71a6300-1cfd-4eb0-b75c-7231184cfe79-utilities" (OuterVolumeSpecName: "utilities") pod "b71a6300-1cfd-4eb0-b75c-7231184cfe79" (UID: "b71a6300-1cfd-4eb0-b75c-7231184cfe79"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.660629 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-kube-api-access-dcjsh" (OuterVolumeSpecName: "kube-api-access-dcjsh") pod "31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6" (UID: "31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6"). InnerVolumeSpecName "kube-api-access-dcjsh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.661131 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.662153 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-kube-api-access-c6zfv" (OuterVolumeSpecName: "kube-api-access-c6zfv") pod "8bd37ceb-bccf-4efc-914e-0fd8ad2bc992" (UID: "8bd37ceb-bccf-4efc-914e-0fd8ad2bc992"). InnerVolumeSpecName "kube-api-access-c6zfv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.663922 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b71a6300-1cfd-4eb0-b75c-7231184cfe79-kube-api-access-z6tpz" (OuterVolumeSpecName: "kube-api-access-z6tpz") pod "b71a6300-1cfd-4eb0-b75c-7231184cfe79" (UID: "b71a6300-1cfd-4eb0-b75c-7231184cfe79"). InnerVolumeSpecName "kube-api-access-z6tpz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.665656 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "8bd37ceb-bccf-4efc-914e-0fd8ad2bc992" (UID: "8bd37ceb-bccf-4efc-914e-0fd8ad2bc992"). 
InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.665938 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acccae42-8133-475f-ad53-dbfa434e5e45-utilities" (OuterVolumeSpecName: "utilities") pod "acccae42-8133-475f-ad53-dbfa434e5e45" (UID: "acccae42-8133-475f-ad53-dbfa434e5e45"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.668010 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6824d35-eddb-4fb5-821d-0a82184fbe45-kube-api-access-d8snr" (OuterVolumeSpecName: "kube-api-access-d8snr") pod "a6824d35-eddb-4fb5-821d-0a82184fbe45" (UID: "a6824d35-eddb-4fb5-821d-0a82184fbe45"). InnerVolumeSpecName "kube-api-access-d8snr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.668229 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acccae42-8133-475f-ad53-dbfa434e5e45-kube-api-access-qm9l7" (OuterVolumeSpecName: "kube-api-access-qm9l7") pod "acccae42-8133-475f-ad53-dbfa434e5e45" (UID: "acccae42-8133-475f-ad53-dbfa434e5e45"). InnerVolumeSpecName "kube-api-access-qm9l7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.673713 4622 scope.go:117] "RemoveContainer" containerID="b297f353335547ad702b9e185558f4a6ddba62c7c3360308c9a4e07ac725bd99" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.675175 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.676223 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6824d35-eddb-4fb5-821d-0a82184fbe45-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a6824d35-eddb-4fb5-821d-0a82184fbe45" (UID: "a6824d35-eddb-4fb5-821d-0a82184fbe45"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.685654 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.687294 4622 scope.go:117] "RemoveContainer" containerID="89558b88f96ba4cc93a4c564da5d2e39101444e9b65d2acfc51418ffe59cafa8" Nov 26 11:15:12 crc kubenswrapper[4622]: E1126 11:15:12.687890 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89558b88f96ba4cc93a4c564da5d2e39101444e9b65d2acfc51418ffe59cafa8\": container with ID starting with 89558b88f96ba4cc93a4c564da5d2e39101444e9b65d2acfc51418ffe59cafa8 not found: ID does not exist" containerID="89558b88f96ba4cc93a4c564da5d2e39101444e9b65d2acfc51418ffe59cafa8" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.687932 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89558b88f96ba4cc93a4c564da5d2e39101444e9b65d2acfc51418ffe59cafa8"} err="failed to get container status \"89558b88f96ba4cc93a4c564da5d2e39101444e9b65d2acfc51418ffe59cafa8\": rpc error: code = NotFound desc = could not find container \"89558b88f96ba4cc93a4c564da5d2e39101444e9b65d2acfc51418ffe59cafa8\": container with ID starting with 89558b88f96ba4cc93a4c564da5d2e39101444e9b65d2acfc51418ffe59cafa8 not found: ID does not exist" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.687961 4622 scope.go:117] "RemoveContainer" containerID="60e3c39f3007d63a87d8e42c82906969ebe7a5d605fb6f0e2116ab1b6475423c" Nov 26 11:15:12 crc kubenswrapper[4622]: E1126 11:15:12.688289 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60e3c39f3007d63a87d8e42c82906969ebe7a5d605fb6f0e2116ab1b6475423c\": container with ID starting with 60e3c39f3007d63a87d8e42c82906969ebe7a5d605fb6f0e2116ab1b6475423c not found: ID does not exist" containerID="60e3c39f3007d63a87d8e42c82906969ebe7a5d605fb6f0e2116ab1b6475423c" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.688341 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60e3c39f3007d63a87d8e42c82906969ebe7a5d605fb6f0e2116ab1b6475423c"} err="failed to get container status \"60e3c39f3007d63a87d8e42c82906969ebe7a5d605fb6f0e2116ab1b6475423c\": rpc error: code = NotFound desc = could not find container \"60e3c39f3007d63a87d8e42c82906969ebe7a5d605fb6f0e2116ab1b6475423c\": container with ID starting with 60e3c39f3007d63a87d8e42c82906969ebe7a5d605fb6f0e2116ab1b6475423c not found: ID does not exist" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.688363 4622 scope.go:117] "RemoveContainer" containerID="b297f353335547ad702b9e185558f4a6ddba62c7c3360308c9a4e07ac725bd99" Nov 26 11:15:12 crc kubenswrapper[4622]: E1126 11:15:12.688671 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b297f353335547ad702b9e185558f4a6ddba62c7c3360308c9a4e07ac725bd99\": container with ID starting with b297f353335547ad702b9e185558f4a6ddba62c7c3360308c9a4e07ac725bd99 not found: ID does not exist" containerID="b297f353335547ad702b9e185558f4a6ddba62c7c3360308c9a4e07ac725bd99" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.688692 4622 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b297f353335547ad702b9e185558f4a6ddba62c7c3360308c9a4e07ac725bd99"} err="failed to get container status \"b297f353335547ad702b9e185558f4a6ddba62c7c3360308c9a4e07ac725bd99\": rpc error: code = NotFound desc = could not find container \"b297f353335547ad702b9e185558f4a6ddba62c7c3360308c9a4e07ac725bd99\": container with ID starting with b297f353335547ad702b9e185558f4a6ddba62c7c3360308c9a4e07ac725bd99 not found: ID does not exist" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.688743 4622 scope.go:117] "RemoveContainer" containerID="49c3c118ef9985260f58b7758f6bff035f64c948cb2fde2e91402dbae1e00763" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.708284 4622 scope.go:117] "RemoveContainer" containerID="49c3c118ef9985260f58b7758f6bff035f64c948cb2fde2e91402dbae1e00763" Nov 26 11:15:12 crc kubenswrapper[4622]: E1126 11:15:12.709277 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49c3c118ef9985260f58b7758f6bff035f64c948cb2fde2e91402dbae1e00763\": container with ID starting with 49c3c118ef9985260f58b7758f6bff035f64c948cb2fde2e91402dbae1e00763 not found: ID does not exist" containerID="49c3c118ef9985260f58b7758f6bff035f64c948cb2fde2e91402dbae1e00763" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.709340 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49c3c118ef9985260f58b7758f6bff035f64c948cb2fde2e91402dbae1e00763"} err="failed to get container status \"49c3c118ef9985260f58b7758f6bff035f64c948cb2fde2e91402dbae1e00763\": rpc error: code = NotFound desc = could not find container \"49c3c118ef9985260f58b7758f6bff035f64c948cb2fde2e91402dbae1e00763\": container with ID starting with 49c3c118ef9985260f58b7758f6bff035f64c948cb2fde2e91402dbae1e00763 not found: ID does not exist" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.709359 4622 scope.go:117] "RemoveContainer" containerID="9689bbc1d9d225e5b3c21af0c6e5d85aa44f1b93604ce3eb920df99c61f74b0d" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.724660 4622 scope.go:117] "RemoveContainer" containerID="83e57793cff3d432ce586087757e30b9d9501ffda0a67c5ecde54b9a6599a71a" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.724928 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.728714 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b71a6300-1cfd-4eb0-b75c-7231184cfe79-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b71a6300-1cfd-4eb0-b75c-7231184cfe79" (UID: "b71a6300-1cfd-4eb0-b75c-7231184cfe79"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.730822 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acccae42-8133-475f-ad53-dbfa434e5e45-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "acccae42-8133-475f-ad53-dbfa434e5e45" (UID: "acccae42-8133-475f-ad53-dbfa434e5e45"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.739246 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6" (UID: "31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.739310 4622 scope.go:117] "RemoveContainer" containerID="e6f92b0524cf343968e30d6e6127225d31b5b8d2a50fe8093b4f87d78fea114c" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.753716 4622 scope.go:117] "RemoveContainer" containerID="9689bbc1d9d225e5b3c21af0c6e5d85aa44f1b93604ce3eb920df99c61f74b0d" Nov 26 11:15:12 crc kubenswrapper[4622]: E1126 11:15:12.754087 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9689bbc1d9d225e5b3c21af0c6e5d85aa44f1b93604ce3eb920df99c61f74b0d\": container with ID starting with 9689bbc1d9d225e5b3c21af0c6e5d85aa44f1b93604ce3eb920df99c61f74b0d not found: ID does not exist" containerID="9689bbc1d9d225e5b3c21af0c6e5d85aa44f1b93604ce3eb920df99c61f74b0d" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.754128 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9689bbc1d9d225e5b3c21af0c6e5d85aa44f1b93604ce3eb920df99c61f74b0d"} err="failed to get container status \"9689bbc1d9d225e5b3c21af0c6e5d85aa44f1b93604ce3eb920df99c61f74b0d\": rpc error: code = NotFound desc = could not find container \"9689bbc1d9d225e5b3c21af0c6e5d85aa44f1b93604ce3eb920df99c61f74b0d\": container with ID starting with 9689bbc1d9d225e5b3c21af0c6e5d85aa44f1b93604ce3eb920df99c61f74b0d not found: ID does not exist" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.754157 4622 scope.go:117] "RemoveContainer" containerID="83e57793cff3d432ce586087757e30b9d9501ffda0a67c5ecde54b9a6599a71a" Nov 26 11:15:12 crc kubenswrapper[4622]: E1126 11:15:12.754640 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83e57793cff3d432ce586087757e30b9d9501ffda0a67c5ecde54b9a6599a71a\": container with ID starting with 83e57793cff3d432ce586087757e30b9d9501ffda0a67c5ecde54b9a6599a71a not found: ID does not exist" containerID="83e57793cff3d432ce586087757e30b9d9501ffda0a67c5ecde54b9a6599a71a" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.754690 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83e57793cff3d432ce586087757e30b9d9501ffda0a67c5ecde54b9a6599a71a"} err="failed to get container status \"83e57793cff3d432ce586087757e30b9d9501ffda0a67c5ecde54b9a6599a71a\": rpc error: code = NotFound desc = could not find container \"83e57793cff3d432ce586087757e30b9d9501ffda0a67c5ecde54b9a6599a71a\": container with ID starting with 83e57793cff3d432ce586087757e30b9d9501ffda0a67c5ecde54b9a6599a71a not found: ID does not exist" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.754727 4622 scope.go:117] "RemoveContainer" containerID="e6f92b0524cf343968e30d6e6127225d31b5b8d2a50fe8093b4f87d78fea114c" Nov 26 11:15:12 crc kubenswrapper[4622]: E1126 11:15:12.755041 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"e6f92b0524cf343968e30d6e6127225d31b5b8d2a50fe8093b4f87d78fea114c\": container with ID starting with e6f92b0524cf343968e30d6e6127225d31b5b8d2a50fe8093b4f87d78fea114c not found: ID does not exist" containerID="e6f92b0524cf343968e30d6e6127225d31b5b8d2a50fe8093b4f87d78fea114c" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.755067 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6f92b0524cf343968e30d6e6127225d31b5b8d2a50fe8093b4f87d78fea114c"} err="failed to get container status \"e6f92b0524cf343968e30d6e6127225d31b5b8d2a50fe8093b4f87d78fea114c\": rpc error: code = NotFound desc = could not find container \"e6f92b0524cf343968e30d6e6127225d31b5b8d2a50fe8093b4f87d78fea114c\": container with ID starting with e6f92b0524cf343968e30d6e6127225d31b5b8d2a50fe8093b4f87d78fea114c not found: ID does not exist" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.759280 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8snr\" (UniqueName: \"kubernetes.io/projected/a6824d35-eddb-4fb5-821d-0a82184fbe45-kube-api-access-d8snr\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.759304 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b71a6300-1cfd-4eb0-b75c-7231184cfe79-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.759318 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6tpz\" (UniqueName: \"kubernetes.io/projected/b71a6300-1cfd-4eb0-b75c-7231184cfe79-kube-api-access-z6tpz\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.759336 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6zfv\" (UniqueName: \"kubernetes.io/projected/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-kube-api-access-c6zfv\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.759345 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acccae42-8133-475f-ad53-dbfa434e5e45-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.759354 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acccae42-8133-475f-ad53-dbfa434e5e45-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.759365 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6824d35-eddb-4fb5-821d-0a82184fbe45-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.759374 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qm9l7\" (UniqueName: \"kubernetes.io/projected/acccae42-8133-475f-ad53-dbfa434e5e45-kube-api-access-qm9l7\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.759386 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b71a6300-1cfd-4eb0-b75c-7231184cfe79-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.759396 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcjsh\" (UniqueName: 
\"kubernetes.io/projected/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-kube-api-access-dcjsh\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.759405 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6824d35-eddb-4fb5-821d-0a82184fbe45-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.759414 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.759423 4622 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.759431 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.759440 4622 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.836877 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.847202 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.849913 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.867100 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5bd7z"] Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.871365 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5bd7z"] Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.879998 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vgnvk"] Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.884065 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vgnvk"] Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.888396 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wnxq7"] Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.894144 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wnxq7"] Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.901523 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l97bl"] Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.905417 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-l97bl"] Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.907825 4622 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/community-operators-d4zmf"] Nov 26 11:15:12 crc kubenswrapper[4622]: I1126 11:15:12.911034 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-d4zmf"] Nov 26 11:15:12 crc kubenswrapper[4622]: E1126 11:15:12.943993 4622 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb71a6300_1cfd_4eb0_b75c_7231184cfe79.slice/crio-aee024512351b8be977aab42420a79e3ef0b5d6c770e0525d943302d4645e59f\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb71a6300_1cfd_4eb0_b75c_7231184cfe79.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31f8dcbc_2fc0_4edd_abf1_2f2aa24a89a6.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podacccae42_8133_475f_ad53_dbfa434e5e45.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31f8dcbc_2fc0_4edd_abf1_2f2aa24a89a6.slice/crio-e3fc48db502f84104bd11d797f351eb723d4c3018ab1c77e205177ecac01d63d\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podacccae42_8133_475f_ad53_dbfa434e5e45.slice/crio-1f3cb3394fa16af51bbaefdabe25f42a7d2d3648983a1231f428305d92af1db4\": RecentStats: unable to find data in memory cache]" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.016782 4622 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.076463 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.254253 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.262111 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.290829 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.411808 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.426148 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.547233 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.580763 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.586369 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.604798 4622 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-network-node-identity"/"kube-root-ca.crt" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.670886 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.736616 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.747803 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.780871 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.787211 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.799582 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.810801 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.814069 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.888674 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Nov 26 11:15:13 crc kubenswrapper[4622]: I1126 11:15:13.932495 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.051247 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.145366 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.180190 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.234107 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.296579 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-f7zqw"] Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.298370 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7"] Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.308728 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.349322 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 
11:15:14.367453 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.461728 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.571216 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.634482 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.719887 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6" path="/var/lib/kubelet/pods/31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6/volumes" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.721683 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bd37ceb-bccf-4efc-914e-0fd8ad2bc992" path="/var/lib/kubelet/pods/8bd37ceb-bccf-4efc-914e-0fd8ad2bc992/volumes" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.722242 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6824d35-eddb-4fb5-821d-0a82184fbe45" path="/var/lib/kubelet/pods/a6824d35-eddb-4fb5-821d-0a82184fbe45/volumes" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.724783 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acccae42-8133-475f-ad53-dbfa434e5e45" path="/var/lib/kubelet/pods/acccae42-8133-475f-ad53-dbfa434e5e45/volumes" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.725936 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b71a6300-1cfd-4eb0-b75c-7231184cfe79" path="/var/lib/kubelet/pods/b71a6300-1cfd-4eb0-b75c-7231184cfe79/volumes" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.726868 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7"] Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.737113 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-f7zqw"] Nov 26 11:15:14 crc kubenswrapper[4622]: W1126 11:15:14.740759 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf2df0e8a_78e0_4d5a_8d31_e7744df07db7.slice/crio-ee330d06a703d0ad0fd0a293fe136bb37f2fe91980c41a32fa3b4dcc80b9b236 WatchSource:0}: Error finding container ee330d06a703d0ad0fd0a293fe136bb37f2fe91980c41a32fa3b4dcc80b9b236: Status 404 returned error can't find the container with id ee330d06a703d0ad0fd0a293fe136bb37f2fe91980c41a32fa3b4dcc80b9b236 Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.751945 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.772135 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.829494 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.845663 4622 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.908091 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.943737 4622 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Nov 26 11:15:14 crc kubenswrapper[4622]: I1126 11:15:14.969587 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.018989 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.075042 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.163661 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.197191 4622 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.236826 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.331074 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.346164 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.445455 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.445579 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.524987 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.559199 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.581148 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.598475 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.598546 4622 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="1984a2fea75478eef5a52b12a7a436daac32be5ba7d40c7f8f3a90995a343a40" exitCode=137 Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.598645 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.598704 4622 scope.go:117] "RemoveContainer" containerID="1984a2fea75478eef5a52b12a7a436daac32be5ba7d40c7f8f3a90995a343a40" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.598972 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.599048 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.599065 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.599084 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.599118 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.599145 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.599162 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.599209 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.599229 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.599420 4622 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.599437 4622 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.599447 4622 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.599456 4622 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.600877 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw" event={"ID":"f2df0e8a-78e0-4d5a-8d31-e7744df07db7","Type":"ContainerStarted","Data":"466d75bf11292278669f267584b62fbf6d560ff94ded057696d7328c301103ca"} Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.600953 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw" event={"ID":"f2df0e8a-78e0-4d5a-8d31-e7744df07db7","Type":"ContainerStarted","Data":"ee330d06a703d0ad0fd0a293fe136bb37f2fe91980c41a32fa3b4dcc80b9b236"} Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.601177 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.603464 4622 generic.go:334] "Generic (PLEG): 
container finished" podID="dd944da3-1a0a-4c06-b3fe-9fcc8187fb01" containerID="be58c9e1fa65f20292e110b611da1a3c9bf8d509cddde7b946b2e9f43d682ef5" exitCode=0 Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.603533 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" event={"ID":"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01","Type":"ContainerDied","Data":"be58c9e1fa65f20292e110b611da1a3c9bf8d509cddde7b946b2e9f43d682ef5"} Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.603566 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" event={"ID":"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01","Type":"ContainerStarted","Data":"cb1b7719799feb4229e9d80536f5030ae2455a7aef9da026fb3a9068a3bc868f"} Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.608086 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.610071 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.617074 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-f7zqw" podStartSLOduration=3.6170640069999997 podStartE2EDuration="3.617064007s" podCreationTimestamp="2025-11-26 11:15:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:15:15.613365748 +0000 UTC m=+275.204577270" watchObservedRunningTime="2025-11-26 11:15:15.617064007 +0000 UTC m=+275.208275530" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.618839 4622 scope.go:117] "RemoveContainer" containerID="1984a2fea75478eef5a52b12a7a436daac32be5ba7d40c7f8f3a90995a343a40" Nov 26 11:15:15 crc kubenswrapper[4622]: E1126 11:15:15.619180 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1984a2fea75478eef5a52b12a7a436daac32be5ba7d40c7f8f3a90995a343a40\": container with ID starting with 1984a2fea75478eef5a52b12a7a436daac32be5ba7d40c7f8f3a90995a343a40 not found: ID does not exist" containerID="1984a2fea75478eef5a52b12a7a436daac32be5ba7d40c7f8f3a90995a343a40" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.619210 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1984a2fea75478eef5a52b12a7a436daac32be5ba7d40c7f8f3a90995a343a40"} err="failed to get container status \"1984a2fea75478eef5a52b12a7a436daac32be5ba7d40c7f8f3a90995a343a40\": rpc error: code = NotFound desc = could not find container \"1984a2fea75478eef5a52b12a7a436daac32be5ba7d40c7f8f3a90995a343a40\": container with ID starting with 1984a2fea75478eef5a52b12a7a436daac32be5ba7d40c7f8f3a90995a343a40 not found: ID does not exist" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.700433 4622 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node 
\"crc\" DevicePath \"\"" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.736256 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.832443 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.869465 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Nov 26 11:15:15 crc kubenswrapper[4622]: I1126 11:15:15.894928 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.114351 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.199802 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.412707 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.720162 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.720535 4622 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.727817 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.727838 4622 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="62f590cb-ca72-4de7-b1d6-a537efa6eedb" Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.730041 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.730073 4622 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="62f590cb-ca72-4de7-b1d6-a537efa6eedb" Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.751878 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.800676 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.919044 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-secret-volume\") pod \"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01\" (UID: \"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01\") " Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.919112 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-config-volume\") pod \"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01\" (UID: \"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01\") " Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.919205 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7tkj\" (UniqueName: \"kubernetes.io/projected/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-kube-api-access-q7tkj\") pod \"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01\" (UID: \"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01\") " Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.919969 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-config-volume" (OuterVolumeSpecName: "config-volume") pod "dd944da3-1a0a-4c06-b3fe-9fcc8187fb01" (UID: "dd944da3-1a0a-4c06-b3fe-9fcc8187fb01"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.924813 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-kube-api-access-q7tkj" (OuterVolumeSpecName: "kube-api-access-q7tkj") pod "dd944da3-1a0a-4c06-b3fe-9fcc8187fb01" (UID: "dd944da3-1a0a-4c06-b3fe-9fcc8187fb01"). InnerVolumeSpecName "kube-api-access-q7tkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:15:16 crc kubenswrapper[4622]: I1126 11:15:16.924837 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "dd944da3-1a0a-4c06-b3fe-9fcc8187fb01" (UID: "dd944da3-1a0a-4c06-b3fe-9fcc8187fb01"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:15:17 crc kubenswrapper[4622]: I1126 11:15:17.021028 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7tkj\" (UniqueName: \"kubernetes.io/projected/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-kube-api-access-q7tkj\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:17 crc kubenswrapper[4622]: I1126 11:15:17.021069 4622 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:17 crc kubenswrapper[4622]: I1126 11:15:17.021086 4622 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01-config-volume\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:17 crc kubenswrapper[4622]: I1126 11:15:17.328672 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Nov 26 11:15:17 crc kubenswrapper[4622]: I1126 11:15:17.618133 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" event={"ID":"dd944da3-1a0a-4c06-b3fe-9fcc8187fb01","Type":"ContainerDied","Data":"cb1b7719799feb4229e9d80536f5030ae2455a7aef9da026fb3a9068a3bc868f"} Nov 26 11:15:17 crc kubenswrapper[4622]: I1126 11:15:17.618204 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb1b7719799feb4229e9d80536f5030ae2455a7aef9da026fb3a9068a3bc868f" Nov 26 11:15:17 crc kubenswrapper[4622]: I1126 11:15:17.618159 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7" Nov 26 11:15:17 crc kubenswrapper[4622]: I1126 11:15:17.651602 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Nov 26 11:15:17 crc kubenswrapper[4622]: I1126 11:15:17.808317 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Nov 26 11:15:17 crc kubenswrapper[4622]: I1126 11:15:17.985424 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Nov 26 11:15:18 crc kubenswrapper[4622]: I1126 11:15:18.057159 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Nov 26 11:15:18 crc kubenswrapper[4622]: I1126 11:15:18.219857 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Nov 26 11:15:18 crc kubenswrapper[4622]: I1126 11:15:18.234910 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.082757 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g675c"] Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.083531 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" podUID="05f00222-b862-4024-a903-c006f34852fb" containerName="controller-manager" containerID="cri-o://a7ca60b6686b82aa94c735033253c7c6ba463c9c4ff9ac0e7b56b3349b462109" gracePeriod=30 Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.189514 
4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82"] Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.189719 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" podUID="867b2e21-3905-4d08-b96c-e23c8240d93d" containerName="route-controller-manager" containerID="cri-o://d0e0b24eaef92e3c25620a41ee6ed79feccb564c873cca9cd697bd2a7d768121" gracePeriod=30 Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.386858 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.474522 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.570800 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05f00222-b862-4024-a903-c006f34852fb-serving-cert\") pod \"05f00222-b862-4024-a903-c006f34852fb\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.570867 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-client-ca\") pod \"05f00222-b862-4024-a903-c006f34852fb\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.570892 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-proxy-ca-bundles\") pod \"05f00222-b862-4024-a903-c006f34852fb\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.570977 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-config\") pod \"05f00222-b862-4024-a903-c006f34852fb\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.571003 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mzhl\" (UniqueName: \"kubernetes.io/projected/05f00222-b862-4024-a903-c006f34852fb-kube-api-access-7mzhl\") pod \"05f00222-b862-4024-a903-c006f34852fb\" (UID: \"05f00222-b862-4024-a903-c006f34852fb\") " Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.571126 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/867b2e21-3905-4d08-b96c-e23c8240d93d-config\") pod \"867b2e21-3905-4d08-b96c-e23c8240d93d\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.571178 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/867b2e21-3905-4d08-b96c-e23c8240d93d-client-ca\") pod \"867b2e21-3905-4d08-b96c-e23c8240d93d\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.571594 4622 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-client-ca" (OuterVolumeSpecName: "client-ca") pod "05f00222-b862-4024-a903-c006f34852fb" (UID: "05f00222-b862-4024-a903-c006f34852fb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.571826 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "05f00222-b862-4024-a903-c006f34852fb" (UID: "05f00222-b862-4024-a903-c006f34852fb"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.571878 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-config" (OuterVolumeSpecName: "config") pod "05f00222-b862-4024-a903-c006f34852fb" (UID: "05f00222-b862-4024-a903-c006f34852fb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.571901 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/867b2e21-3905-4d08-b96c-e23c8240d93d-client-ca" (OuterVolumeSpecName: "client-ca") pod "867b2e21-3905-4d08-b96c-e23c8240d93d" (UID: "867b2e21-3905-4d08-b96c-e23c8240d93d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.571927 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/867b2e21-3905-4d08-b96c-e23c8240d93d-config" (OuterVolumeSpecName: "config") pod "867b2e21-3905-4d08-b96c-e23c8240d93d" (UID: "867b2e21-3905-4d08-b96c-e23c8240d93d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.575810 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05f00222-b862-4024-a903-c006f34852fb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "05f00222-b862-4024-a903-c006f34852fb" (UID: "05f00222-b862-4024-a903-c006f34852fb"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.576141 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05f00222-b862-4024-a903-c006f34852fb-kube-api-access-7mzhl" (OuterVolumeSpecName: "kube-api-access-7mzhl") pod "05f00222-b862-4024-a903-c006f34852fb" (UID: "05f00222-b862-4024-a903-c006f34852fb"). InnerVolumeSpecName "kube-api-access-7mzhl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.672513 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtd4w\" (UniqueName: \"kubernetes.io/projected/867b2e21-3905-4d08-b96c-e23c8240d93d-kube-api-access-xtd4w\") pod \"867b2e21-3905-4d08-b96c-e23c8240d93d\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.672583 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/867b2e21-3905-4d08-b96c-e23c8240d93d-serving-cert\") pod \"867b2e21-3905-4d08-b96c-e23c8240d93d\" (UID: \"867b2e21-3905-4d08-b96c-e23c8240d93d\") " Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.675403 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/867b2e21-3905-4d08-b96c-e23c8240d93d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "867b2e21-3905-4d08-b96c-e23c8240d93d" (UID: "867b2e21-3905-4d08-b96c-e23c8240d93d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.676298 4622 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/867b2e21-3905-4d08-b96c-e23c8240d93d-client-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.676328 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05f00222-b862-4024-a903-c006f34852fb-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.676337 4622 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-client-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.676352 4622 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.676365 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05f00222-b862-4024-a903-c006f34852fb-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.676390 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mzhl\" (UniqueName: \"kubernetes.io/projected/05f00222-b862-4024-a903-c006f34852fb-kube-api-access-7mzhl\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.676399 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/867b2e21-3905-4d08-b96c-e23c8240d93d-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.679263 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/867b2e21-3905-4d08-b96c-e23c8240d93d-kube-api-access-xtd4w" (OuterVolumeSpecName: "kube-api-access-xtd4w") pod "867b2e21-3905-4d08-b96c-e23c8240d93d" (UID: "867b2e21-3905-4d08-b96c-e23c8240d93d"). InnerVolumeSpecName "kube-api-access-xtd4w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.757196 4622 generic.go:334] "Generic (PLEG): container finished" podID="867b2e21-3905-4d08-b96c-e23c8240d93d" containerID="d0e0b24eaef92e3c25620a41ee6ed79feccb564c873cca9cd697bd2a7d768121" exitCode=0 Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.757290 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" event={"ID":"867b2e21-3905-4d08-b96c-e23c8240d93d","Type":"ContainerDied","Data":"d0e0b24eaef92e3c25620a41ee6ed79feccb564c873cca9cd697bd2a7d768121"} Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.757345 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" event={"ID":"867b2e21-3905-4d08-b96c-e23c8240d93d","Type":"ContainerDied","Data":"378131fe0db66186f44d756abebd94be91bdba8be97485eac30ed9abbceff364"} Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.757364 4622 scope.go:117] "RemoveContainer" containerID="d0e0b24eaef92e3c25620a41ee6ed79feccb564c873cca9cd697bd2a7d768121" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.757543 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.760171 4622 generic.go:334] "Generic (PLEG): container finished" podID="05f00222-b862-4024-a903-c006f34852fb" containerID="a7ca60b6686b82aa94c735033253c7c6ba463c9c4ff9ac0e7b56b3349b462109" exitCode=0 Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.760199 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" event={"ID":"05f00222-b862-4024-a903-c006f34852fb","Type":"ContainerDied","Data":"a7ca60b6686b82aa94c735033253c7c6ba463c9c4ff9ac0e7b56b3349b462109"} Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.760216 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" event={"ID":"05f00222-b862-4024-a903-c006f34852fb","Type":"ContainerDied","Data":"099cccb0f6c810668605e3a8ad4cf4bbbd9885ec70f13a02d158ca7b2168e5d4"} Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.760249 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-g675c" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.772588 4622 scope.go:117] "RemoveContainer" containerID="d0e0b24eaef92e3c25620a41ee6ed79feccb564c873cca9cd697bd2a7d768121" Nov 26 11:15:42 crc kubenswrapper[4622]: E1126 11:15:42.772839 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0e0b24eaef92e3c25620a41ee6ed79feccb564c873cca9cd697bd2a7d768121\": container with ID starting with d0e0b24eaef92e3c25620a41ee6ed79feccb564c873cca9cd697bd2a7d768121 not found: ID does not exist" containerID="d0e0b24eaef92e3c25620a41ee6ed79feccb564c873cca9cd697bd2a7d768121" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.772869 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0e0b24eaef92e3c25620a41ee6ed79feccb564c873cca9cd697bd2a7d768121"} err="failed to get container status \"d0e0b24eaef92e3c25620a41ee6ed79feccb564c873cca9cd697bd2a7d768121\": rpc error: code = NotFound desc = could not find container \"d0e0b24eaef92e3c25620a41ee6ed79feccb564c873cca9cd697bd2a7d768121\": container with ID starting with d0e0b24eaef92e3c25620a41ee6ed79feccb564c873cca9cd697bd2a7d768121 not found: ID does not exist" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.772887 4622 scope.go:117] "RemoveContainer" containerID="a7ca60b6686b82aa94c735033253c7c6ba463c9c4ff9ac0e7b56b3349b462109" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.773342 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82"] Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.776017 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-djf82"] Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.780675 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtd4w\" (UniqueName: \"kubernetes.io/projected/867b2e21-3905-4d08-b96c-e23c8240d93d-kube-api-access-xtd4w\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.780700 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/867b2e21-3905-4d08-b96c-e23c8240d93d-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.781455 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g675c"] Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.783448 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g675c"] Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.789111 4622 scope.go:117] "RemoveContainer" containerID="a7ca60b6686b82aa94c735033253c7c6ba463c9c4ff9ac0e7b56b3349b462109" Nov 26 11:15:42 crc kubenswrapper[4622]: E1126 11:15:42.789668 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7ca60b6686b82aa94c735033253c7c6ba463c9c4ff9ac0e7b56b3349b462109\": container with ID starting with a7ca60b6686b82aa94c735033253c7c6ba463c9c4ff9ac0e7b56b3349b462109 not found: ID does not exist" containerID="a7ca60b6686b82aa94c735033253c7c6ba463c9c4ff9ac0e7b56b3349b462109" Nov 26 11:15:42 crc kubenswrapper[4622]: I1126 11:15:42.789704 4622 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7ca60b6686b82aa94c735033253c7c6ba463c9c4ff9ac0e7b56b3349b462109"} err="failed to get container status \"a7ca60b6686b82aa94c735033253c7c6ba463c9c4ff9ac0e7b56b3349b462109\": rpc error: code = NotFound desc = could not find container \"a7ca60b6686b82aa94c735033253c7c6ba463c9c4ff9ac0e7b56b3349b462109\": container with ID starting with a7ca60b6686b82aa94c735033253c7c6ba463c9c4ff9ac0e7b56b3349b462109 not found: ID does not exist" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.488905 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-56cd597557-k4z64"] Nov 26 11:15:43 crc kubenswrapper[4622]: E1126 11:15:43.489119 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b71a6300-1cfd-4eb0-b75c-7231184cfe79" containerName="extract-utilities" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489134 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="b71a6300-1cfd-4eb0-b75c-7231184cfe79" containerName="extract-utilities" Nov 26 11:15:43 crc kubenswrapper[4622]: E1126 11:15:43.489143 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6824d35-eddb-4fb5-821d-0a82184fbe45" containerName="extract-content" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489149 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6824d35-eddb-4fb5-821d-0a82184fbe45" containerName="extract-content" Nov 26 11:15:43 crc kubenswrapper[4622]: E1126 11:15:43.489157 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6" containerName="extract-utilities" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489164 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6" containerName="extract-utilities" Nov 26 11:15:43 crc kubenswrapper[4622]: E1126 11:15:43.489173 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6" containerName="extract-content" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489179 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6" containerName="extract-content" Nov 26 11:15:43 crc kubenswrapper[4622]: E1126 11:15:43.489187 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="867b2e21-3905-4d08-b96c-e23c8240d93d" containerName="route-controller-manager" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489192 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="867b2e21-3905-4d08-b96c-e23c8240d93d" containerName="route-controller-manager" Nov 26 11:15:43 crc kubenswrapper[4622]: E1126 11:15:43.489203 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05f00222-b862-4024-a903-c006f34852fb" containerName="controller-manager" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489208 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="05f00222-b862-4024-a903-c006f34852fb" containerName="controller-manager" Nov 26 11:15:43 crc kubenswrapper[4622]: E1126 11:15:43.489216 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b71a6300-1cfd-4eb0-b75c-7231184cfe79" containerName="extract-content" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489221 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="b71a6300-1cfd-4eb0-b75c-7231184cfe79" containerName="extract-content" Nov 26 11:15:43 crc 
kubenswrapper[4622]: E1126 11:15:43.489228 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acccae42-8133-475f-ad53-dbfa434e5e45" containerName="registry-server" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489233 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="acccae42-8133-475f-ad53-dbfa434e5e45" containerName="registry-server" Nov 26 11:15:43 crc kubenswrapper[4622]: E1126 11:15:43.489240 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6" containerName="registry-server" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489246 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6" containerName="registry-server" Nov 26 11:15:43 crc kubenswrapper[4622]: E1126 11:15:43.489253 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6824d35-eddb-4fb5-821d-0a82184fbe45" containerName="registry-server" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489258 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6824d35-eddb-4fb5-821d-0a82184fbe45" containerName="registry-server" Nov 26 11:15:43 crc kubenswrapper[4622]: E1126 11:15:43.489265 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bd37ceb-bccf-4efc-914e-0fd8ad2bc992" containerName="marketplace-operator" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489272 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bd37ceb-bccf-4efc-914e-0fd8ad2bc992" containerName="marketplace-operator" Nov 26 11:15:43 crc kubenswrapper[4622]: E1126 11:15:43.489279 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6824d35-eddb-4fb5-821d-0a82184fbe45" containerName="extract-utilities" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489285 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6824d35-eddb-4fb5-821d-0a82184fbe45" containerName="extract-utilities" Nov 26 11:15:43 crc kubenswrapper[4622]: E1126 11:15:43.489291 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd944da3-1a0a-4c06-b3fe-9fcc8187fb01" containerName="collect-profiles" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489298 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd944da3-1a0a-4c06-b3fe-9fcc8187fb01" containerName="collect-profiles" Nov 26 11:15:43 crc kubenswrapper[4622]: E1126 11:15:43.489305 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acccae42-8133-475f-ad53-dbfa434e5e45" containerName="extract-utilities" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489310 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="acccae42-8133-475f-ad53-dbfa434e5e45" containerName="extract-utilities" Nov 26 11:15:43 crc kubenswrapper[4622]: E1126 11:15:43.489317 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b71a6300-1cfd-4eb0-b75c-7231184cfe79" containerName="registry-server" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489321 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="b71a6300-1cfd-4eb0-b75c-7231184cfe79" containerName="registry-server" Nov 26 11:15:43 crc kubenswrapper[4622]: E1126 11:15:43.489331 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acccae42-8133-475f-ad53-dbfa434e5e45" containerName="extract-content" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489337 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="acccae42-8133-475f-ad53-dbfa434e5e45" 
containerName="extract-content" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489434 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6824d35-eddb-4fb5-821d-0a82184fbe45" containerName="registry-server" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489446 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="05f00222-b862-4024-a903-c006f34852fb" containerName="controller-manager" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489454 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="31f8dcbc-2fc0-4edd-abf1-2f2aa24a89a6" containerName="registry-server" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489465 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd944da3-1a0a-4c06-b3fe-9fcc8187fb01" containerName="collect-profiles" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489471 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="867b2e21-3905-4d08-b96c-e23c8240d93d" containerName="route-controller-manager" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489478 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="b71a6300-1cfd-4eb0-b75c-7231184cfe79" containerName="registry-server" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489484 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bd37ceb-bccf-4efc-914e-0fd8ad2bc992" containerName="marketplace-operator" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489489 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="acccae42-8133-475f-ad53-dbfa434e5e45" containerName="registry-server" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.489856 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.492955 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.493866 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.494319 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.494641 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.495607 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.495950 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.501776 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-56cd597557-k4z64"] Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.505012 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.506628 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7"] Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.507550 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.509312 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.509540 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.509581 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.509762 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.511895 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.512064 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.516277 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7"] Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.587833 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-config\") pod \"controller-manager-56cd597557-k4z64\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.587892 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9x8q\" (UniqueName: \"kubernetes.io/projected/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-kube-api-access-h9x8q\") pod \"route-controller-manager-9769cf756-gmwg7\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.587953 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db5aa452-2ca0-4fe5-8032-4ff3151847e0-serving-cert\") pod \"controller-manager-56cd597557-k4z64\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.588184 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zj8ww\" (UniqueName: \"kubernetes.io/projected/db5aa452-2ca0-4fe5-8032-4ff3151847e0-kube-api-access-zj8ww\") pod \"controller-manager-56cd597557-k4z64\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.588264 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-client-ca\") pod \"controller-manager-56cd597557-k4z64\" (UID: 
\"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.588530 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-proxy-ca-bundles\") pod \"controller-manager-56cd597557-k4z64\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.588571 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-client-ca\") pod \"route-controller-manager-9769cf756-gmwg7\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.588720 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-serving-cert\") pod \"route-controller-manager-9769cf756-gmwg7\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.588783 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-config\") pod \"route-controller-manager-9769cf756-gmwg7\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.689417 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-serving-cert\") pod \"route-controller-manager-9769cf756-gmwg7\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.689458 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-config\") pod \"route-controller-manager-9769cf756-gmwg7\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.689482 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-config\") pod \"controller-manager-56cd597557-k4z64\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.689519 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9x8q\" (UniqueName: \"kubernetes.io/projected/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-kube-api-access-h9x8q\") pod \"route-controller-manager-9769cf756-gmwg7\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " 
pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.689552 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db5aa452-2ca0-4fe5-8032-4ff3151847e0-serving-cert\") pod \"controller-manager-56cd597557-k4z64\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.689579 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zj8ww\" (UniqueName: \"kubernetes.io/projected/db5aa452-2ca0-4fe5-8032-4ff3151847e0-kube-api-access-zj8ww\") pod \"controller-manager-56cd597557-k4z64\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.689598 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-client-ca\") pod \"controller-manager-56cd597557-k4z64\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.689634 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-proxy-ca-bundles\") pod \"controller-manager-56cd597557-k4z64\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.689657 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-client-ca\") pod \"route-controller-manager-9769cf756-gmwg7\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.690554 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-client-ca\") pod \"route-controller-manager-9769cf756-gmwg7\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.691114 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-client-ca\") pod \"controller-manager-56cd597557-k4z64\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.691161 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-config\") pod \"controller-manager-56cd597557-k4z64\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.691293 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-config\") pod \"route-controller-manager-9769cf756-gmwg7\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.691456 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-proxy-ca-bundles\") pod \"controller-manager-56cd597557-k4z64\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.695926 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-serving-cert\") pod \"route-controller-manager-9769cf756-gmwg7\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.696993 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db5aa452-2ca0-4fe5-8032-4ff3151847e0-serving-cert\") pod \"controller-manager-56cd597557-k4z64\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.703545 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9x8q\" (UniqueName: \"kubernetes.io/projected/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-kube-api-access-h9x8q\") pod \"route-controller-manager-9769cf756-gmwg7\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.704017 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zj8ww\" (UniqueName: \"kubernetes.io/projected/db5aa452-2ca0-4fe5-8032-4ff3151847e0-kube-api-access-zj8ww\") pod \"controller-manager-56cd597557-k4z64\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.804476 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.822400 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:43 crc kubenswrapper[4622]: I1126 11:15:43.949984 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-56cd597557-k4z64"] Nov 26 11:15:43 crc kubenswrapper[4622]: W1126 11:15:43.954139 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddb5aa452_2ca0_4fe5_8032_4ff3151847e0.slice/crio-dc5ab310df936df3867d737a1fd23eaa216a1b7e018a58c99ee58846025de816 WatchSource:0}: Error finding container dc5ab310df936df3867d737a1fd23eaa216a1b7e018a58c99ee58846025de816: Status 404 returned error can't find the container with id dc5ab310df936df3867d737a1fd23eaa216a1b7e018a58c99ee58846025de816 Nov 26 11:15:44 crc kubenswrapper[4622]: I1126 11:15:44.210769 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7"] Nov 26 11:15:44 crc kubenswrapper[4622]: W1126 11:15:44.218214 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc73cbbd6_cb95_42a1_a9ab_4cc212c7fb3b.slice/crio-41e2681db8740d78dc7ecadc433fdfb19643341d6709388a778d34468bd9ce97 WatchSource:0}: Error finding container 41e2681db8740d78dc7ecadc433fdfb19643341d6709388a778d34468bd9ce97: Status 404 returned error can't find the container with id 41e2681db8740d78dc7ecadc433fdfb19643341d6709388a778d34468bd9ce97 Nov 26 11:15:44 crc kubenswrapper[4622]: I1126 11:15:44.720638 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05f00222-b862-4024-a903-c006f34852fb" path="/var/lib/kubelet/pods/05f00222-b862-4024-a903-c006f34852fb/volumes" Nov 26 11:15:44 crc kubenswrapper[4622]: I1126 11:15:44.721409 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="867b2e21-3905-4d08-b96c-e23c8240d93d" path="/var/lib/kubelet/pods/867b2e21-3905-4d08-b96c-e23c8240d93d/volumes" Nov 26 11:15:44 crc kubenswrapper[4622]: I1126 11:15:44.775058 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" event={"ID":"db5aa452-2ca0-4fe5-8032-4ff3151847e0","Type":"ContainerStarted","Data":"2232ed986a4a1b10e26df6e28cd0f3fcb55fa81173f358f47511d61cc5e4449d"} Nov 26 11:15:44 crc kubenswrapper[4622]: I1126 11:15:44.775117 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" event={"ID":"db5aa452-2ca0-4fe5-8032-4ff3151847e0","Type":"ContainerStarted","Data":"dc5ab310df936df3867d737a1fd23eaa216a1b7e018a58c99ee58846025de816"} Nov 26 11:15:44 crc kubenswrapper[4622]: I1126 11:15:44.775345 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:44 crc kubenswrapper[4622]: I1126 11:15:44.776815 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" event={"ID":"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b","Type":"ContainerStarted","Data":"522832a05d66b814b9b6a560f37aa48f3a4883c26f659d1dc399148941e85834"} Nov 26 11:15:44 crc kubenswrapper[4622]: I1126 11:15:44.776950 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" 
event={"ID":"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b","Type":"ContainerStarted","Data":"41e2681db8740d78dc7ecadc433fdfb19643341d6709388a778d34468bd9ce97"} Nov 26 11:15:44 crc kubenswrapper[4622]: I1126 11:15:44.777222 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:44 crc kubenswrapper[4622]: I1126 11:15:44.780847 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:15:44 crc kubenswrapper[4622]: I1126 11:15:44.781145 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:15:44 crc kubenswrapper[4622]: I1126 11:15:44.791596 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" podStartSLOduration=1.79158201 podStartE2EDuration="1.79158201s" podCreationTimestamp="2025-11-26 11:15:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:15:44.7915291 +0000 UTC m=+304.382740623" watchObservedRunningTime="2025-11-26 11:15:44.79158201 +0000 UTC m=+304.382793523" Nov 26 11:15:44 crc kubenswrapper[4622]: I1126 11:15:44.822853 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" podStartSLOduration=1.8228323579999999 podStartE2EDuration="1.822832358s" podCreationTimestamp="2025-11-26 11:15:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:15:44.820216541 +0000 UTC m=+304.411428062" watchObservedRunningTime="2025-11-26 11:15:44.822832358 +0000 UTC m=+304.414043879" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.477685 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-ldbvn"] Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.478756 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.489696 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-ldbvn"] Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.512000 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-registry-tls\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.512114 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gzmw\" (UniqueName: \"kubernetes.io/projected/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-kube-api-access-2gzmw\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.512139 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-ca-trust-extracted\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.512189 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-bound-sa-token\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.512240 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-registry-certificates\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.512270 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.512328 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-installation-pull-secrets\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.512357 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-trusted-ca\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.534792 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.614293 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-installation-pull-secrets\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.614354 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-trusted-ca\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.614407 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-registry-tls\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.614484 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-ca-trust-extracted\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.614518 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gzmw\" (UniqueName: \"kubernetes.io/projected/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-kube-api-access-2gzmw\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.614554 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-bound-sa-token\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.614701 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-registry-certificates\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.615773 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-ca-trust-extracted\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.616537 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-registry-certificates\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.616882 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-trusted-ca\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.620559 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-registry-tls\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.620909 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-installation-pull-secrets\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.629112 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gzmw\" (UniqueName: \"kubernetes.io/projected/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-kube-api-access-2gzmw\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.629539 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3093d567-efd5-4b92-aaa6-a0eb75a6f8e7-bound-sa-token\") pod \"image-registry-66df7c8f76-ldbvn\" (UID: \"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:51 crc kubenswrapper[4622]: I1126 11:15:51.793359 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:52 crc kubenswrapper[4622]: I1126 11:15:52.162151 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-ldbvn"] Nov 26 11:15:52 crc kubenswrapper[4622]: I1126 11:15:52.820909 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" event={"ID":"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7","Type":"ContainerStarted","Data":"6041de27bd618fa34c3235beed7a63ceab7abf7a5dca1c815858b12e043aa5c3"} Nov 26 11:15:52 crc kubenswrapper[4622]: I1126 11:15:52.821333 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:15:52 crc kubenswrapper[4622]: I1126 11:15:52.821347 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" event={"ID":"3093d567-efd5-4b92-aaa6-a0eb75a6f8e7","Type":"ContainerStarted","Data":"931f9f4f9b97718f3673cd0f1b2e9de79091c86f4580f4afb4db74e841755a86"} Nov 26 11:15:52 crc kubenswrapper[4622]: I1126 11:15:52.851637 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" podStartSLOduration=1.851610212 podStartE2EDuration="1.851610212s" podCreationTimestamp="2025-11-26 11:15:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:15:52.838059159 +0000 UTC m=+312.429270681" watchObservedRunningTime="2025-11-26 11:15:52.851610212 +0000 UTC m=+312.442821725" Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.105141 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7"] Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.106766 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" podUID="c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b" containerName="route-controller-manager" containerID="cri-o://522832a05d66b814b9b6a560f37aa48f3a4883c26f659d1dc399148941e85834" gracePeriod=30 Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.542891 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.584731 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-client-ca\") pod \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.584811 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9x8q\" (UniqueName: \"kubernetes.io/projected/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-kube-api-access-h9x8q\") pod \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.584834 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-serving-cert\") pod \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.584880 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-config\") pod \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\" (UID: \"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b\") " Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.585643 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-client-ca" (OuterVolumeSpecName: "client-ca") pod "c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b" (UID: "c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.585690 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-config" (OuterVolumeSpecName: "config") pod "c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b" (UID: "c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.589855 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b" (UID: "c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.590071 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-kube-api-access-h9x8q" (OuterVolumeSpecName: "kube-api-access-h9x8q") pod "c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b" (UID: "c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b"). InnerVolumeSpecName "kube-api-access-h9x8q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.686733 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9x8q\" (UniqueName: \"kubernetes.io/projected/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-kube-api-access-h9x8q\") on node \"crc\" DevicePath \"\"" Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.686776 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.686792 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.686805 4622 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b-client-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.874703 4622 generic.go:334] "Generic (PLEG): container finished" podID="c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b" containerID="522832a05d66b814b9b6a560f37aa48f3a4883c26f659d1dc399148941e85834" exitCode=0 Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.874750 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" event={"ID":"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b","Type":"ContainerDied","Data":"522832a05d66b814b9b6a560f37aa48f3a4883c26f659d1dc399148941e85834"} Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.874779 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.874802 4622 scope.go:117] "RemoveContainer" containerID="522832a05d66b814b9b6a560f37aa48f3a4883c26f659d1dc399148941e85834" Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.874789 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7" event={"ID":"c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b","Type":"ContainerDied","Data":"41e2681db8740d78dc7ecadc433fdfb19643341d6709388a778d34468bd9ce97"} Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.888092 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7"] Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.890333 4622 scope.go:117] "RemoveContainer" containerID="522832a05d66b814b9b6a560f37aa48f3a4883c26f659d1dc399148941e85834" Nov 26 11:16:02 crc kubenswrapper[4622]: E1126 11:16:02.890851 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"522832a05d66b814b9b6a560f37aa48f3a4883c26f659d1dc399148941e85834\": container with ID starting with 522832a05d66b814b9b6a560f37aa48f3a4883c26f659d1dc399148941e85834 not found: ID does not exist" containerID="522832a05d66b814b9b6a560f37aa48f3a4883c26f659d1dc399148941e85834" Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.890878 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-9769cf756-gmwg7"] Nov 26 11:16:02 crc kubenswrapper[4622]: I1126 11:16:02.890886 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"522832a05d66b814b9b6a560f37aa48f3a4883c26f659d1dc399148941e85834"} err="failed to get container status \"522832a05d66b814b9b6a560f37aa48f3a4883c26f659d1dc399148941e85834\": rpc error: code = NotFound desc = could not find container \"522832a05d66b814b9b6a560f37aa48f3a4883c26f659d1dc399148941e85834\": container with ID starting with 522832a05d66b814b9b6a560f37aa48f3a4883c26f659d1dc399148941e85834 not found: ID does not exist" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.536030 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs"] Nov 26 11:16:03 crc kubenswrapper[4622]: E1126 11:16:03.536254 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b" containerName="route-controller-manager" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.536268 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b" containerName="route-controller-manager" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.536370 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b" containerName="route-controller-manager" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.536784 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.538605 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.538773 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.538886 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.538956 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.539081 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.539922 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.547676 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs"] Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.598389 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/940cdc3a-224b-486b-bfc0-5ae1cf16bc24-client-ca\") pod \"route-controller-manager-6b8bb787c9-8k2gs\" (UID: \"940cdc3a-224b-486b-bfc0-5ae1cf16bc24\") " pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.598455 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/940cdc3a-224b-486b-bfc0-5ae1cf16bc24-serving-cert\") pod \"route-controller-manager-6b8bb787c9-8k2gs\" (UID: \"940cdc3a-224b-486b-bfc0-5ae1cf16bc24\") " pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.598576 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fg7lz\" (UniqueName: \"kubernetes.io/projected/940cdc3a-224b-486b-bfc0-5ae1cf16bc24-kube-api-access-fg7lz\") pod \"route-controller-manager-6b8bb787c9-8k2gs\" (UID: \"940cdc3a-224b-486b-bfc0-5ae1cf16bc24\") " pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.598602 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/940cdc3a-224b-486b-bfc0-5ae1cf16bc24-config\") pod \"route-controller-manager-6b8bb787c9-8k2gs\" (UID: \"940cdc3a-224b-486b-bfc0-5ae1cf16bc24\") " pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.700217 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/940cdc3a-224b-486b-bfc0-5ae1cf16bc24-client-ca\") pod 
\"route-controller-manager-6b8bb787c9-8k2gs\" (UID: \"940cdc3a-224b-486b-bfc0-5ae1cf16bc24\") " pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.700281 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/940cdc3a-224b-486b-bfc0-5ae1cf16bc24-serving-cert\") pod \"route-controller-manager-6b8bb787c9-8k2gs\" (UID: \"940cdc3a-224b-486b-bfc0-5ae1cf16bc24\") " pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.700373 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fg7lz\" (UniqueName: \"kubernetes.io/projected/940cdc3a-224b-486b-bfc0-5ae1cf16bc24-kube-api-access-fg7lz\") pod \"route-controller-manager-6b8bb787c9-8k2gs\" (UID: \"940cdc3a-224b-486b-bfc0-5ae1cf16bc24\") " pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.700415 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/940cdc3a-224b-486b-bfc0-5ae1cf16bc24-config\") pod \"route-controller-manager-6b8bb787c9-8k2gs\" (UID: \"940cdc3a-224b-486b-bfc0-5ae1cf16bc24\") " pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.701701 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/940cdc3a-224b-486b-bfc0-5ae1cf16bc24-config\") pod \"route-controller-manager-6b8bb787c9-8k2gs\" (UID: \"940cdc3a-224b-486b-bfc0-5ae1cf16bc24\") " pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.702106 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/940cdc3a-224b-486b-bfc0-5ae1cf16bc24-client-ca\") pod \"route-controller-manager-6b8bb787c9-8k2gs\" (UID: \"940cdc3a-224b-486b-bfc0-5ae1cf16bc24\") " pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.705750 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/940cdc3a-224b-486b-bfc0-5ae1cf16bc24-serving-cert\") pod \"route-controller-manager-6b8bb787c9-8k2gs\" (UID: \"940cdc3a-224b-486b-bfc0-5ae1cf16bc24\") " pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.716903 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fg7lz\" (UniqueName: \"kubernetes.io/projected/940cdc3a-224b-486b-bfc0-5ae1cf16bc24-kube-api-access-fg7lz\") pod \"route-controller-manager-6b8bb787c9-8k2gs\" (UID: \"940cdc3a-224b-486b-bfc0-5ae1cf16bc24\") " pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:03 crc kubenswrapper[4622]: I1126 11:16:03.850198 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:04 crc kubenswrapper[4622]: I1126 11:16:04.205709 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs"] Nov 26 11:16:04 crc kubenswrapper[4622]: I1126 11:16:04.715131 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b" path="/var/lib/kubelet/pods/c73cbbd6-cb95-42a1-a9ab-4cc212c7fb3b/volumes" Nov 26 11:16:04 crc kubenswrapper[4622]: I1126 11:16:04.891109 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" event={"ID":"940cdc3a-224b-486b-bfc0-5ae1cf16bc24","Type":"ContainerStarted","Data":"8b6285ae8748f8fdd6a6d3eb1a2aa9a3af98c59160fae97909dae9a0a44b4389"} Nov 26 11:16:04 crc kubenswrapper[4622]: I1126 11:16:04.891161 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" event={"ID":"940cdc3a-224b-486b-bfc0-5ae1cf16bc24","Type":"ContainerStarted","Data":"b8792e41bd4a58234be394dbbad6ba75d2fd6db5567f6f0bd0c113eafb2b1b25"} Nov 26 11:16:04 crc kubenswrapper[4622]: I1126 11:16:04.891540 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:04 crc kubenswrapper[4622]: I1126 11:16:04.896346 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" Nov 26 11:16:04 crc kubenswrapper[4622]: I1126 11:16:04.908436 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6b8bb787c9-8k2gs" podStartSLOduration=2.908413888 podStartE2EDuration="2.908413888s" podCreationTimestamp="2025-11-26 11:16:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:16:04.905061671 +0000 UTC m=+324.496273193" watchObservedRunningTime="2025-11-26 11:16:04.908413888 +0000 UTC m=+324.499625410" Nov 26 11:16:11 crc kubenswrapper[4622]: I1126 11:16:11.798273 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-ldbvn" Nov 26 11:16:11 crc kubenswrapper[4622]: I1126 11:16:11.834955 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qqt6v"] Nov 26 11:16:15 crc kubenswrapper[4622]: I1126 11:16:15.199616 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:16:15 crc kubenswrapper[4622]: I1126 11:16:15.200313 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:16:22 crc kubenswrapper[4622]: I1126 11:16:22.320785 4622 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-operators-f7b64"] Nov 26 11:16:22 crc kubenswrapper[4622]: I1126 11:16:22.321988 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f7b64" Nov 26 11:16:22 crc kubenswrapper[4622]: I1126 11:16:22.323629 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 26 11:16:22 crc kubenswrapper[4622]: I1126 11:16:22.330344 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f7b64"] Nov 26 11:16:22 crc kubenswrapper[4622]: I1126 11:16:22.352273 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dda58d5-346a-4eeb-9942-38221a073d22-utilities\") pod \"redhat-operators-f7b64\" (UID: \"2dda58d5-346a-4eeb-9942-38221a073d22\") " pod="openshift-marketplace/redhat-operators-f7b64" Nov 26 11:16:22 crc kubenswrapper[4622]: I1126 11:16:22.352323 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7tl7\" (UniqueName: \"kubernetes.io/projected/2dda58d5-346a-4eeb-9942-38221a073d22-kube-api-access-d7tl7\") pod \"redhat-operators-f7b64\" (UID: \"2dda58d5-346a-4eeb-9942-38221a073d22\") " pod="openshift-marketplace/redhat-operators-f7b64" Nov 26 11:16:22 crc kubenswrapper[4622]: I1126 11:16:22.352637 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dda58d5-346a-4eeb-9942-38221a073d22-catalog-content\") pod \"redhat-operators-f7b64\" (UID: \"2dda58d5-346a-4eeb-9942-38221a073d22\") " pod="openshift-marketplace/redhat-operators-f7b64" Nov 26 11:16:22 crc kubenswrapper[4622]: I1126 11:16:22.454544 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dda58d5-346a-4eeb-9942-38221a073d22-utilities\") pod \"redhat-operators-f7b64\" (UID: \"2dda58d5-346a-4eeb-9942-38221a073d22\") " pod="openshift-marketplace/redhat-operators-f7b64" Nov 26 11:16:22 crc kubenswrapper[4622]: I1126 11:16:22.454749 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7tl7\" (UniqueName: \"kubernetes.io/projected/2dda58d5-346a-4eeb-9942-38221a073d22-kube-api-access-d7tl7\") pod \"redhat-operators-f7b64\" (UID: \"2dda58d5-346a-4eeb-9942-38221a073d22\") " pod="openshift-marketplace/redhat-operators-f7b64" Nov 26 11:16:22 crc kubenswrapper[4622]: I1126 11:16:22.454866 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dda58d5-346a-4eeb-9942-38221a073d22-catalog-content\") pod \"redhat-operators-f7b64\" (UID: \"2dda58d5-346a-4eeb-9942-38221a073d22\") " pod="openshift-marketplace/redhat-operators-f7b64" Nov 26 11:16:22 crc kubenswrapper[4622]: I1126 11:16:22.455239 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dda58d5-346a-4eeb-9942-38221a073d22-utilities\") pod \"redhat-operators-f7b64\" (UID: \"2dda58d5-346a-4eeb-9942-38221a073d22\") " pod="openshift-marketplace/redhat-operators-f7b64" Nov 26 11:16:22 crc kubenswrapper[4622]: I1126 11:16:22.455452 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/2dda58d5-346a-4eeb-9942-38221a073d22-catalog-content\") pod \"redhat-operators-f7b64\" (UID: \"2dda58d5-346a-4eeb-9942-38221a073d22\") " pod="openshift-marketplace/redhat-operators-f7b64" Nov 26 11:16:22 crc kubenswrapper[4622]: I1126 11:16:22.474310 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7tl7\" (UniqueName: \"kubernetes.io/projected/2dda58d5-346a-4eeb-9942-38221a073d22-kube-api-access-d7tl7\") pod \"redhat-operators-f7b64\" (UID: \"2dda58d5-346a-4eeb-9942-38221a073d22\") " pod="openshift-marketplace/redhat-operators-f7b64" Nov 26 11:16:22 crc kubenswrapper[4622]: I1126 11:16:22.639666 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f7b64" Nov 26 11:16:22 crc kubenswrapper[4622]: I1126 11:16:22.997440 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f7b64"] Nov 26 11:16:23 crc kubenswrapper[4622]: W1126 11:16:23.004293 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2dda58d5_346a_4eeb_9942_38221a073d22.slice/crio-adc478462cdabaa0ba9f0a204bbf14870abc3b73b98a6e8693aa071fe28a7f38 WatchSource:0}: Error finding container adc478462cdabaa0ba9f0a204bbf14870abc3b73b98a6e8693aa071fe28a7f38: Status 404 returned error can't find the container with id adc478462cdabaa0ba9f0a204bbf14870abc3b73b98a6e8693aa071fe28a7f38 Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.317892 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pvq22"] Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.319145 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pvq22" Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.320830 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.326586 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pvq22"] Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.365676 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33de4658-ea3b-4ebe-bec1-ed6190d6f2c2-utilities\") pod \"certified-operators-pvq22\" (UID: \"33de4658-ea3b-4ebe-bec1-ed6190d6f2c2\") " pod="openshift-marketplace/certified-operators-pvq22" Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.365821 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33de4658-ea3b-4ebe-bec1-ed6190d6f2c2-catalog-content\") pod \"certified-operators-pvq22\" (UID: \"33de4658-ea3b-4ebe-bec1-ed6190d6f2c2\") " pod="openshift-marketplace/certified-operators-pvq22" Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.365973 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkghw\" (UniqueName: \"kubernetes.io/projected/33de4658-ea3b-4ebe-bec1-ed6190d6f2c2-kube-api-access-nkghw\") pod \"certified-operators-pvq22\" (UID: \"33de4658-ea3b-4ebe-bec1-ed6190d6f2c2\") " pod="openshift-marketplace/certified-operators-pvq22" Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.467746 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33de4658-ea3b-4ebe-bec1-ed6190d6f2c2-catalog-content\") pod \"certified-operators-pvq22\" (UID: \"33de4658-ea3b-4ebe-bec1-ed6190d6f2c2\") " pod="openshift-marketplace/certified-operators-pvq22" Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.468118 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkghw\" (UniqueName: \"kubernetes.io/projected/33de4658-ea3b-4ebe-bec1-ed6190d6f2c2-kube-api-access-nkghw\") pod \"certified-operators-pvq22\" (UID: \"33de4658-ea3b-4ebe-bec1-ed6190d6f2c2\") " pod="openshift-marketplace/certified-operators-pvq22" Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.468243 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33de4658-ea3b-4ebe-bec1-ed6190d6f2c2-utilities\") pod \"certified-operators-pvq22\" (UID: \"33de4658-ea3b-4ebe-bec1-ed6190d6f2c2\") " pod="openshift-marketplace/certified-operators-pvq22" Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.468309 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33de4658-ea3b-4ebe-bec1-ed6190d6f2c2-catalog-content\") pod \"certified-operators-pvq22\" (UID: \"33de4658-ea3b-4ebe-bec1-ed6190d6f2c2\") " pod="openshift-marketplace/certified-operators-pvq22" Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.468572 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33de4658-ea3b-4ebe-bec1-ed6190d6f2c2-utilities\") pod \"certified-operators-pvq22\" (UID: 
\"33de4658-ea3b-4ebe-bec1-ed6190d6f2c2\") " pod="openshift-marketplace/certified-operators-pvq22" Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.484952 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkghw\" (UniqueName: \"kubernetes.io/projected/33de4658-ea3b-4ebe-bec1-ed6190d6f2c2-kube-api-access-nkghw\") pod \"certified-operators-pvq22\" (UID: \"33de4658-ea3b-4ebe-bec1-ed6190d6f2c2\") " pod="openshift-marketplace/certified-operators-pvq22" Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.635326 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pvq22" Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.989714 4622 generic.go:334] "Generic (PLEG): container finished" podID="2dda58d5-346a-4eeb-9942-38221a073d22" containerID="e8de958f27ac2cd0a04bb2cca115595689a2e6c21bc21aad459f18a2acc888ff" exitCode=0 Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.989772 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f7b64" event={"ID":"2dda58d5-346a-4eeb-9942-38221a073d22","Type":"ContainerDied","Data":"e8de958f27ac2cd0a04bb2cca115595689a2e6c21bc21aad459f18a2acc888ff"} Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.989808 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f7b64" event={"ID":"2dda58d5-346a-4eeb-9942-38221a073d22","Type":"ContainerStarted","Data":"adc478462cdabaa0ba9f0a204bbf14870abc3b73b98a6e8693aa071fe28a7f38"} Nov 26 11:16:23 crc kubenswrapper[4622]: I1126 11:16:23.994401 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pvq22"] Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.720131 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-t8s85"] Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.721421 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t8s85" Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.723673 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.731522 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t8s85"] Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.785324 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37486221-c1f5-4a72-9923-34c65efb3a0f-utilities\") pod \"redhat-marketplace-t8s85\" (UID: \"37486221-c1f5-4a72-9923-34c65efb3a0f\") " pod="openshift-marketplace/redhat-marketplace-t8s85" Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.785377 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37486221-c1f5-4a72-9923-34c65efb3a0f-catalog-content\") pod \"redhat-marketplace-t8s85\" (UID: \"37486221-c1f5-4a72-9923-34c65efb3a0f\") " pod="openshift-marketplace/redhat-marketplace-t8s85" Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.785409 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmgjx\" (UniqueName: \"kubernetes.io/projected/37486221-c1f5-4a72-9923-34c65efb3a0f-kube-api-access-cmgjx\") pod \"redhat-marketplace-t8s85\" (UID: \"37486221-c1f5-4a72-9923-34c65efb3a0f\") " pod="openshift-marketplace/redhat-marketplace-t8s85" Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.887554 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37486221-c1f5-4a72-9923-34c65efb3a0f-utilities\") pod \"redhat-marketplace-t8s85\" (UID: \"37486221-c1f5-4a72-9923-34c65efb3a0f\") " pod="openshift-marketplace/redhat-marketplace-t8s85" Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.887618 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37486221-c1f5-4a72-9923-34c65efb3a0f-catalog-content\") pod \"redhat-marketplace-t8s85\" (UID: \"37486221-c1f5-4a72-9923-34c65efb3a0f\") " pod="openshift-marketplace/redhat-marketplace-t8s85" Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.887654 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmgjx\" (UniqueName: \"kubernetes.io/projected/37486221-c1f5-4a72-9923-34c65efb3a0f-kube-api-access-cmgjx\") pod \"redhat-marketplace-t8s85\" (UID: \"37486221-c1f5-4a72-9923-34c65efb3a0f\") " pod="openshift-marketplace/redhat-marketplace-t8s85" Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.888086 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37486221-c1f5-4a72-9923-34c65efb3a0f-catalog-content\") pod \"redhat-marketplace-t8s85\" (UID: \"37486221-c1f5-4a72-9923-34c65efb3a0f\") " pod="openshift-marketplace/redhat-marketplace-t8s85" Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.888250 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37486221-c1f5-4a72-9923-34c65efb3a0f-utilities\") pod \"redhat-marketplace-t8s85\" (UID: 
\"37486221-c1f5-4a72-9923-34c65efb3a0f\") " pod="openshift-marketplace/redhat-marketplace-t8s85" Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.913863 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmgjx\" (UniqueName: \"kubernetes.io/projected/37486221-c1f5-4a72-9923-34c65efb3a0f-kube-api-access-cmgjx\") pod \"redhat-marketplace-t8s85\" (UID: \"37486221-c1f5-4a72-9923-34c65efb3a0f\") " pod="openshift-marketplace/redhat-marketplace-t8s85" Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.996553 4622 generic.go:334] "Generic (PLEG): container finished" podID="33de4658-ea3b-4ebe-bec1-ed6190d6f2c2" containerID="e6159ae41a8448818676e2e0250100839aee2e92b9dfdd9458592e8443af0890" exitCode=0 Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.996632 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvq22" event={"ID":"33de4658-ea3b-4ebe-bec1-ed6190d6f2c2","Type":"ContainerDied","Data":"e6159ae41a8448818676e2e0250100839aee2e92b9dfdd9458592e8443af0890"} Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.996919 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvq22" event={"ID":"33de4658-ea3b-4ebe-bec1-ed6190d6f2c2","Type":"ContainerStarted","Data":"92ab5ffed549eb37f180272572847f0d750076b064f590c3d41b2d1961b3f4af"} Nov 26 11:16:24 crc kubenswrapper[4622]: I1126 11:16:24.998761 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f7b64" event={"ID":"2dda58d5-346a-4eeb-9942-38221a073d22","Type":"ContainerStarted","Data":"4bcae34b7ebd81d2457ea7cd73d86d25f862d7abcfa11d13e998167448cb679c"} Nov 26 11:16:25 crc kubenswrapper[4622]: I1126 11:16:25.034738 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t8s85" Nov 26 11:16:25 crc kubenswrapper[4622]: I1126 11:16:25.394032 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t8s85"] Nov 26 11:16:25 crc kubenswrapper[4622]: I1126 11:16:25.718317 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hlg7h"] Nov 26 11:16:25 crc kubenswrapper[4622]: I1126 11:16:25.719467 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hlg7h" Nov 26 11:16:25 crc kubenswrapper[4622]: I1126 11:16:25.723382 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 26 11:16:25 crc kubenswrapper[4622]: I1126 11:16:25.724461 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hlg7h"] Nov 26 11:16:25 crc kubenswrapper[4622]: I1126 11:16:25.800238 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baf7a2ad-d38e-4569-ad12-d346fc0abfed-utilities\") pod \"community-operators-hlg7h\" (UID: \"baf7a2ad-d38e-4569-ad12-d346fc0abfed\") " pod="openshift-marketplace/community-operators-hlg7h" Nov 26 11:16:25 crc kubenswrapper[4622]: I1126 11:16:25.800306 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baf7a2ad-d38e-4569-ad12-d346fc0abfed-catalog-content\") pod \"community-operators-hlg7h\" (UID: \"baf7a2ad-d38e-4569-ad12-d346fc0abfed\") " pod="openshift-marketplace/community-operators-hlg7h" Nov 26 11:16:25 crc kubenswrapper[4622]: I1126 11:16:25.800372 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrnhv\" (UniqueName: \"kubernetes.io/projected/baf7a2ad-d38e-4569-ad12-d346fc0abfed-kube-api-access-qrnhv\") pod \"community-operators-hlg7h\" (UID: \"baf7a2ad-d38e-4569-ad12-d346fc0abfed\") " pod="openshift-marketplace/community-operators-hlg7h" Nov 26 11:16:25 crc kubenswrapper[4622]: I1126 11:16:25.901380 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baf7a2ad-d38e-4569-ad12-d346fc0abfed-utilities\") pod \"community-operators-hlg7h\" (UID: \"baf7a2ad-d38e-4569-ad12-d346fc0abfed\") " pod="openshift-marketplace/community-operators-hlg7h" Nov 26 11:16:25 crc kubenswrapper[4622]: I1126 11:16:25.901428 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baf7a2ad-d38e-4569-ad12-d346fc0abfed-catalog-content\") pod \"community-operators-hlg7h\" (UID: \"baf7a2ad-d38e-4569-ad12-d346fc0abfed\") " pod="openshift-marketplace/community-operators-hlg7h" Nov 26 11:16:25 crc kubenswrapper[4622]: I1126 11:16:25.901472 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrnhv\" (UniqueName: \"kubernetes.io/projected/baf7a2ad-d38e-4569-ad12-d346fc0abfed-kube-api-access-qrnhv\") pod \"community-operators-hlg7h\" (UID: \"baf7a2ad-d38e-4569-ad12-d346fc0abfed\") " pod="openshift-marketplace/community-operators-hlg7h" Nov 26 11:16:25 crc kubenswrapper[4622]: I1126 11:16:25.902105 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baf7a2ad-d38e-4569-ad12-d346fc0abfed-catalog-content\") pod \"community-operators-hlg7h\" (UID: \"baf7a2ad-d38e-4569-ad12-d346fc0abfed\") " pod="openshift-marketplace/community-operators-hlg7h" Nov 26 11:16:25 crc kubenswrapper[4622]: I1126 11:16:25.902642 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baf7a2ad-d38e-4569-ad12-d346fc0abfed-utilities\") pod \"community-operators-hlg7h\" (UID: 
\"baf7a2ad-d38e-4569-ad12-d346fc0abfed\") " pod="openshift-marketplace/community-operators-hlg7h" Nov 26 11:16:25 crc kubenswrapper[4622]: I1126 11:16:25.917741 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrnhv\" (UniqueName: \"kubernetes.io/projected/baf7a2ad-d38e-4569-ad12-d346fc0abfed-kube-api-access-qrnhv\") pod \"community-operators-hlg7h\" (UID: \"baf7a2ad-d38e-4569-ad12-d346fc0abfed\") " pod="openshift-marketplace/community-operators-hlg7h" Nov 26 11:16:26 crc kubenswrapper[4622]: I1126 11:16:26.010248 4622 generic.go:334] "Generic (PLEG): container finished" podID="33de4658-ea3b-4ebe-bec1-ed6190d6f2c2" containerID="37dcac6939fd80c09e2cc5fbd84ee0ecf3b648f7a902dd4d09532cfb59ad3bfa" exitCode=0 Nov 26 11:16:26 crc kubenswrapper[4622]: I1126 11:16:26.010348 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvq22" event={"ID":"33de4658-ea3b-4ebe-bec1-ed6190d6f2c2","Type":"ContainerDied","Data":"37dcac6939fd80c09e2cc5fbd84ee0ecf3b648f7a902dd4d09532cfb59ad3bfa"} Nov 26 11:16:26 crc kubenswrapper[4622]: I1126 11:16:26.015178 4622 generic.go:334] "Generic (PLEG): container finished" podID="2dda58d5-346a-4eeb-9942-38221a073d22" containerID="4bcae34b7ebd81d2457ea7cd73d86d25f862d7abcfa11d13e998167448cb679c" exitCode=0 Nov 26 11:16:26 crc kubenswrapper[4622]: I1126 11:16:26.015310 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f7b64" event={"ID":"2dda58d5-346a-4eeb-9942-38221a073d22","Type":"ContainerDied","Data":"4bcae34b7ebd81d2457ea7cd73d86d25f862d7abcfa11d13e998167448cb679c"} Nov 26 11:16:26 crc kubenswrapper[4622]: I1126 11:16:26.017423 4622 generic.go:334] "Generic (PLEG): container finished" podID="37486221-c1f5-4a72-9923-34c65efb3a0f" containerID="1e038f8063844aa2293022bf62559f73fe382233b5b3b1884b8fdcb947e6f8af" exitCode=0 Nov 26 11:16:26 crc kubenswrapper[4622]: I1126 11:16:26.017482 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t8s85" event={"ID":"37486221-c1f5-4a72-9923-34c65efb3a0f","Type":"ContainerDied","Data":"1e038f8063844aa2293022bf62559f73fe382233b5b3b1884b8fdcb947e6f8af"} Nov 26 11:16:26 crc kubenswrapper[4622]: I1126 11:16:26.017535 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t8s85" event={"ID":"37486221-c1f5-4a72-9923-34c65efb3a0f","Type":"ContainerStarted","Data":"8f5bae725e7891473f00a5f0ac18e33b6cb64134715d3f0f91a812d5b0ba4ff4"} Nov 26 11:16:26 crc kubenswrapper[4622]: I1126 11:16:26.071609 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hlg7h" Nov 26 11:16:26 crc kubenswrapper[4622]: I1126 11:16:26.250625 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hlg7h"] Nov 26 11:16:26 crc kubenswrapper[4622]: W1126 11:16:26.257148 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbaf7a2ad_d38e_4569_ad12_d346fc0abfed.slice/crio-bbc5e9b90ef2c405e454c1ca3c5f6b1a5d56c2857ac1301a078d647eb1dba1a8 WatchSource:0}: Error finding container bbc5e9b90ef2c405e454c1ca3c5f6b1a5d56c2857ac1301a078d647eb1dba1a8: Status 404 returned error can't find the container with id bbc5e9b90ef2c405e454c1ca3c5f6b1a5d56c2857ac1301a078d647eb1dba1a8 Nov 26 11:16:27 crc kubenswrapper[4622]: I1126 11:16:27.024721 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvq22" event={"ID":"33de4658-ea3b-4ebe-bec1-ed6190d6f2c2","Type":"ContainerStarted","Data":"0ba4bb9462c3da2f4a586e026d09fd311a4edd9946f31be1ea4cefe5e7c2e1a0"} Nov 26 11:16:27 crc kubenswrapper[4622]: I1126 11:16:27.026794 4622 generic.go:334] "Generic (PLEG): container finished" podID="37486221-c1f5-4a72-9923-34c65efb3a0f" containerID="a06602b60db0555bf987963840312b033f0f02e34f78c437a8e4502e859c9c3b" exitCode=0 Nov 26 11:16:27 crc kubenswrapper[4622]: I1126 11:16:27.026848 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t8s85" event={"ID":"37486221-c1f5-4a72-9923-34c65efb3a0f","Type":"ContainerDied","Data":"a06602b60db0555bf987963840312b033f0f02e34f78c437a8e4502e859c9c3b"} Nov 26 11:16:27 crc kubenswrapper[4622]: I1126 11:16:27.030663 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f7b64" event={"ID":"2dda58d5-346a-4eeb-9942-38221a073d22","Type":"ContainerStarted","Data":"b7f95f31e78d63f28ce6fe3f963ad901e79f83e0e22be9f4d45150649405ca3f"} Nov 26 11:16:27 crc kubenswrapper[4622]: I1126 11:16:27.032524 4622 generic.go:334] "Generic (PLEG): container finished" podID="baf7a2ad-d38e-4569-ad12-d346fc0abfed" containerID="ae68cd25625483d8bc9aff4657924775328ff0aa45a1006301a52bac11b86c65" exitCode=0 Nov 26 11:16:27 crc kubenswrapper[4622]: I1126 11:16:27.032553 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlg7h" event={"ID":"baf7a2ad-d38e-4569-ad12-d346fc0abfed","Type":"ContainerDied","Data":"ae68cd25625483d8bc9aff4657924775328ff0aa45a1006301a52bac11b86c65"} Nov 26 11:16:27 crc kubenswrapper[4622]: I1126 11:16:27.032571 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlg7h" event={"ID":"baf7a2ad-d38e-4569-ad12-d346fc0abfed","Type":"ContainerStarted","Data":"bbc5e9b90ef2c405e454c1ca3c5f6b1a5d56c2857ac1301a078d647eb1dba1a8"} Nov 26 11:16:27 crc kubenswrapper[4622]: I1126 11:16:27.045055 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pvq22" podStartSLOduration=2.475245682 podStartE2EDuration="4.045035807s" podCreationTimestamp="2025-11-26 11:16:23 +0000 UTC" firstStartedPulling="2025-11-26 11:16:24.998221601 +0000 UTC m=+344.589433123" lastFinishedPulling="2025-11-26 11:16:26.568011725 +0000 UTC m=+346.159223248" observedRunningTime="2025-11-26 11:16:27.041957649 +0000 UTC m=+346.633169171" watchObservedRunningTime="2025-11-26 11:16:27.045035807 +0000 UTC m=+346.636247330" Nov 26 
11:16:27 crc kubenswrapper[4622]: I1126 11:16:27.056802 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-f7b64" podStartSLOduration=2.287516856 podStartE2EDuration="5.056787606s" podCreationTimestamp="2025-11-26 11:16:22 +0000 UTC" firstStartedPulling="2025-11-26 11:16:23.992728019 +0000 UTC m=+343.583939541" lastFinishedPulling="2025-11-26 11:16:26.761998769 +0000 UTC m=+346.353210291" observedRunningTime="2025-11-26 11:16:27.055185662 +0000 UTC m=+346.646397185" watchObservedRunningTime="2025-11-26 11:16:27.056787606 +0000 UTC m=+346.647999128" Nov 26 11:16:28 crc kubenswrapper[4622]: I1126 11:16:28.039372 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlg7h" event={"ID":"baf7a2ad-d38e-4569-ad12-d346fc0abfed","Type":"ContainerStarted","Data":"20e083740d7ce7b6d63860cf3abe06f61f3fd9da51115fb9978d4835d38031a1"} Nov 26 11:16:28 crc kubenswrapper[4622]: I1126 11:16:28.041417 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t8s85" event={"ID":"37486221-c1f5-4a72-9923-34c65efb3a0f","Type":"ContainerStarted","Data":"79d61a30d56d7dad33c320dc1fb717d9de7b7be014cea1e8ae6d9c45ce403a93"} Nov 26 11:16:28 crc kubenswrapper[4622]: I1126 11:16:28.091464 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-t8s85" podStartSLOduration=2.5428403729999998 podStartE2EDuration="4.09142577s" podCreationTimestamp="2025-11-26 11:16:24 +0000 UTC" firstStartedPulling="2025-11-26 11:16:26.018864081 +0000 UTC m=+345.610075604" lastFinishedPulling="2025-11-26 11:16:27.56744948 +0000 UTC m=+347.158661001" observedRunningTime="2025-11-26 11:16:28.086277644 +0000 UTC m=+347.677489176" watchObservedRunningTime="2025-11-26 11:16:28.09142577 +0000 UTC m=+347.682637291" Nov 26 11:16:29 crc kubenswrapper[4622]: I1126 11:16:29.048360 4622 generic.go:334] "Generic (PLEG): container finished" podID="baf7a2ad-d38e-4569-ad12-d346fc0abfed" containerID="20e083740d7ce7b6d63860cf3abe06f61f3fd9da51115fb9978d4835d38031a1" exitCode=0 Nov 26 11:16:29 crc kubenswrapper[4622]: I1126 11:16:29.048418 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlg7h" event={"ID":"baf7a2ad-d38e-4569-ad12-d346fc0abfed","Type":"ContainerDied","Data":"20e083740d7ce7b6d63860cf3abe06f61f3fd9da51115fb9978d4835d38031a1"} Nov 26 11:16:30 crc kubenswrapper[4622]: I1126 11:16:30.055589 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlg7h" event={"ID":"baf7a2ad-d38e-4569-ad12-d346fc0abfed","Type":"ContainerStarted","Data":"3906ef9396d2452edc5af1078a2d7d9d4d8bf6155c604c62329fc608b42f8f57"} Nov 26 11:16:30 crc kubenswrapper[4622]: I1126 11:16:30.073363 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hlg7h" podStartSLOduration=2.433194904 podStartE2EDuration="5.073342903s" podCreationTimestamp="2025-11-26 11:16:25 +0000 UTC" firstStartedPulling="2025-11-26 11:16:27.033548229 +0000 UTC m=+346.624759751" lastFinishedPulling="2025-11-26 11:16:29.673696227 +0000 UTC m=+349.264907750" observedRunningTime="2025-11-26 11:16:30.071080703 +0000 UTC m=+349.662292225" watchObservedRunningTime="2025-11-26 11:16:30.073342903 +0000 UTC m=+349.664554425" Nov 26 11:16:32 crc kubenswrapper[4622]: I1126 11:16:32.640785 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/redhat-operators-f7b64" Nov 26 11:16:32 crc kubenswrapper[4622]: I1126 11:16:32.641057 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-f7b64" Nov 26 11:16:32 crc kubenswrapper[4622]: I1126 11:16:32.669911 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-f7b64" Nov 26 11:16:33 crc kubenswrapper[4622]: I1126 11:16:33.105306 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-f7b64" Nov 26 11:16:33 crc kubenswrapper[4622]: I1126 11:16:33.635476 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pvq22" Nov 26 11:16:33 crc kubenswrapper[4622]: I1126 11:16:33.635597 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pvq22" Nov 26 11:16:33 crc kubenswrapper[4622]: I1126 11:16:33.665141 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pvq22" Nov 26 11:16:34 crc kubenswrapper[4622]: I1126 11:16:34.116800 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pvq22" Nov 26 11:16:35 crc kubenswrapper[4622]: I1126 11:16:35.035563 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-t8s85" Nov 26 11:16:35 crc kubenswrapper[4622]: I1126 11:16:35.037229 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-t8s85" Nov 26 11:16:35 crc kubenswrapper[4622]: I1126 11:16:35.070574 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-t8s85" Nov 26 11:16:35 crc kubenswrapper[4622]: I1126 11:16:35.116242 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-t8s85" Nov 26 11:16:36 crc kubenswrapper[4622]: I1126 11:16:36.072795 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hlg7h" Nov 26 11:16:36 crc kubenswrapper[4622]: I1126 11:16:36.080634 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hlg7h" Nov 26 11:16:36 crc kubenswrapper[4622]: I1126 11:16:36.106517 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hlg7h" Nov 26 11:16:36 crc kubenswrapper[4622]: I1126 11:16:36.136009 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hlg7h" Nov 26 11:16:36 crc kubenswrapper[4622]: I1126 11:16:36.865332 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" podUID="5012bed8-6f9e-47b8-9f71-5eff34c9d997" containerName="registry" containerID="cri-o://c94c4f8ecf4df3241b4abd7b2a2f93ee9b59d2ad6616118594a895b9cbdaf2bb" gracePeriod=30 Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.113901 4622 generic.go:334] "Generic (PLEG): container finished" podID="5012bed8-6f9e-47b8-9f71-5eff34c9d997" containerID="c94c4f8ecf4df3241b4abd7b2a2f93ee9b59d2ad6616118594a895b9cbdaf2bb" exitCode=0 Nov 26 11:16:37 
crc kubenswrapper[4622]: I1126 11:16:37.114638 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" event={"ID":"5012bed8-6f9e-47b8-9f71-5eff34c9d997","Type":"ContainerDied","Data":"c94c4f8ecf4df3241b4abd7b2a2f93ee9b59d2ad6616118594a895b9cbdaf2bb"} Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.207562 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.273342 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5012bed8-6f9e-47b8-9f71-5eff34c9d997-ca-trust-extracted\") pod \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.273870 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5012bed8-6f9e-47b8-9f71-5eff34c9d997-installation-pull-secrets\") pod \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.273938 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5012bed8-6f9e-47b8-9f71-5eff34c9d997-registry-certificates\") pod \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.274024 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-bound-sa-token\") pod \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.274146 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.274189 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5012bed8-6f9e-47b8-9f71-5eff34c9d997-trusted-ca\") pod \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.274211 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4srz\" (UniqueName: \"kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-kube-api-access-f4srz\") pod \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.274250 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-registry-tls\") pod \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\" (UID: \"5012bed8-6f9e-47b8-9f71-5eff34c9d997\") " Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.275127 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/5012bed8-6f9e-47b8-9f71-5eff34c9d997-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "5012bed8-6f9e-47b8-9f71-5eff34c9d997" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.275173 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5012bed8-6f9e-47b8-9f71-5eff34c9d997-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "5012bed8-6f9e-47b8-9f71-5eff34c9d997" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.280785 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5012bed8-6f9e-47b8-9f71-5eff34c9d997-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "5012bed8-6f9e-47b8-9f71-5eff34c9d997" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.283851 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "5012bed8-6f9e-47b8-9f71-5eff34c9d997" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.284218 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "5012bed8-6f9e-47b8-9f71-5eff34c9d997" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.284722 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-kube-api-access-f4srz" (OuterVolumeSpecName: "kube-api-access-f4srz") pod "5012bed8-6f9e-47b8-9f71-5eff34c9d997" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997"). InnerVolumeSpecName "kube-api-access-f4srz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.285638 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "5012bed8-6f9e-47b8-9f71-5eff34c9d997" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.290075 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5012bed8-6f9e-47b8-9f71-5eff34c9d997-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "5012bed8-6f9e-47b8-9f71-5eff34c9d997" (UID: "5012bed8-6f9e-47b8-9f71-5eff34c9d997"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.375847 4622 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.375875 4622 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5012bed8-6f9e-47b8-9f71-5eff34c9d997-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.375886 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4srz\" (UniqueName: \"kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-kube-api-access-f4srz\") on node \"crc\" DevicePath \"\"" Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.375898 4622 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5012bed8-6f9e-47b8-9f71-5eff34c9d997-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.375907 4622 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5012bed8-6f9e-47b8-9f71-5eff34c9d997-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.375917 4622 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5012bed8-6f9e-47b8-9f71-5eff34c9d997-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 26 11:16:37 crc kubenswrapper[4622]: I1126 11:16:37.375924 4622 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5012bed8-6f9e-47b8-9f71-5eff34c9d997-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 26 11:16:38 crc kubenswrapper[4622]: I1126 11:16:38.120891 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" event={"ID":"5012bed8-6f9e-47b8-9f71-5eff34c9d997","Type":"ContainerDied","Data":"33278b3b86ba917e3c05ccced1e5717433afa49ffb4af9f4b045945353760f6b"} Nov 26 11:16:38 crc kubenswrapper[4622]: I1126 11:16:38.120969 4622 scope.go:117] "RemoveContainer" containerID="c94c4f8ecf4df3241b4abd7b2a2f93ee9b59d2ad6616118594a895b9cbdaf2bb" Nov 26 11:16:38 crc kubenswrapper[4622]: I1126 11:16:38.120981 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qqt6v" Nov 26 11:16:38 crc kubenswrapper[4622]: I1126 11:16:38.154764 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qqt6v"] Nov 26 11:16:38 crc kubenswrapper[4622]: I1126 11:16:38.155045 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qqt6v"] Nov 26 11:16:38 crc kubenswrapper[4622]: I1126 11:16:38.713238 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5012bed8-6f9e-47b8-9f71-5eff34c9d997" path="/var/lib/kubelet/pods/5012bed8-6f9e-47b8-9f71-5eff34c9d997/volumes" Nov 26 11:16:42 crc kubenswrapper[4622]: I1126 11:16:42.083085 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-56cd597557-k4z64"] Nov 26 11:16:42 crc kubenswrapper[4622]: I1126 11:16:42.083783 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" podUID="db5aa452-2ca0-4fe5-8032-4ff3151847e0" containerName="controller-manager" containerID="cri-o://2232ed986a4a1b10e26df6e28cd0f3fcb55fa81173f358f47511d61cc5e4449d" gracePeriod=30 Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.153441 4622 generic.go:334] "Generic (PLEG): container finished" podID="db5aa452-2ca0-4fe5-8032-4ff3151847e0" containerID="2232ed986a4a1b10e26df6e28cd0f3fcb55fa81173f358f47511d61cc5e4449d" exitCode=0 Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.153564 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" event={"ID":"db5aa452-2ca0-4fe5-8032-4ff3151847e0","Type":"ContainerDied","Data":"2232ed986a4a1b10e26df6e28cd0f3fcb55fa81173f358f47511d61cc5e4449d"} Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.154236 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" event={"ID":"db5aa452-2ca0-4fe5-8032-4ff3151847e0","Type":"ContainerDied","Data":"dc5ab310df936df3867d737a1fd23eaa216a1b7e018a58c99ee58846025de816"} Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.154262 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc5ab310df936df3867d737a1fd23eaa216a1b7e018a58c99ee58846025de816" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.307585 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.330600 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h"] Nov 26 11:16:43 crc kubenswrapper[4622]: E1126 11:16:43.330834 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db5aa452-2ca0-4fe5-8032-4ff3151847e0" containerName="controller-manager" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.330846 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="db5aa452-2ca0-4fe5-8032-4ff3151847e0" containerName="controller-manager" Nov 26 11:16:43 crc kubenswrapper[4622]: E1126 11:16:43.330864 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5012bed8-6f9e-47b8-9f71-5eff34c9d997" containerName="registry" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.330870 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="5012bed8-6f9e-47b8-9f71-5eff34c9d997" containerName="registry" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.330960 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="5012bed8-6f9e-47b8-9f71-5eff34c9d997" containerName="registry" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.330970 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="db5aa452-2ca0-4fe5-8032-4ff3151847e0" containerName="controller-manager" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.331341 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.336599 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h"] Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.357801 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-proxy-ca-bundles\") pod \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.357937 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-client-ca\") pod \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.357984 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zj8ww\" (UniqueName: \"kubernetes.io/projected/db5aa452-2ca0-4fe5-8032-4ff3151847e0-kube-api-access-zj8ww\") pod \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.358059 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-config\") pod \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.358083 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db5aa452-2ca0-4fe5-8032-4ff3151847e0-serving-cert\") pod 
\"db5aa452-2ca0-4fe5-8032-4ff3151847e0\" (UID: \"db5aa452-2ca0-4fe5-8032-4ff3151847e0\") " Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.358229 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggqn9\" (UniqueName: \"kubernetes.io/projected/eaa2673b-dea0-4911-b2b8-a9f196d7175e-kube-api-access-ggqn9\") pod \"controller-manager-c77b6d4fb-sgg4h\" (UID: \"eaa2673b-dea0-4911-b2b8-a9f196d7175e\") " pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.358278 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eaa2673b-dea0-4911-b2b8-a9f196d7175e-client-ca\") pod \"controller-manager-c77b6d4fb-sgg4h\" (UID: \"eaa2673b-dea0-4911-b2b8-a9f196d7175e\") " pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.358351 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaa2673b-dea0-4911-b2b8-a9f196d7175e-config\") pod \"controller-manager-c77b6d4fb-sgg4h\" (UID: \"eaa2673b-dea0-4911-b2b8-a9f196d7175e\") " pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.358394 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/eaa2673b-dea0-4911-b2b8-a9f196d7175e-proxy-ca-bundles\") pod \"controller-manager-c77b6d4fb-sgg4h\" (UID: \"eaa2673b-dea0-4911-b2b8-a9f196d7175e\") " pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.358424 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eaa2673b-dea0-4911-b2b8-a9f196d7175e-serving-cert\") pod \"controller-manager-c77b6d4fb-sgg4h\" (UID: \"eaa2673b-dea0-4911-b2b8-a9f196d7175e\") " pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.359734 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-client-ca" (OuterVolumeSpecName: "client-ca") pod "db5aa452-2ca0-4fe5-8032-4ff3151847e0" (UID: "db5aa452-2ca0-4fe5-8032-4ff3151847e0"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.359818 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "db5aa452-2ca0-4fe5-8032-4ff3151847e0" (UID: "db5aa452-2ca0-4fe5-8032-4ff3151847e0"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.360569 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-config" (OuterVolumeSpecName: "config") pod "db5aa452-2ca0-4fe5-8032-4ff3151847e0" (UID: "db5aa452-2ca0-4fe5-8032-4ff3151847e0"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.369657 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db5aa452-2ca0-4fe5-8032-4ff3151847e0-kube-api-access-zj8ww" (OuterVolumeSpecName: "kube-api-access-zj8ww") pod "db5aa452-2ca0-4fe5-8032-4ff3151847e0" (UID: "db5aa452-2ca0-4fe5-8032-4ff3151847e0"). InnerVolumeSpecName "kube-api-access-zj8ww". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.372763 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db5aa452-2ca0-4fe5-8032-4ff3151847e0-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "db5aa452-2ca0-4fe5-8032-4ff3151847e0" (UID: "db5aa452-2ca0-4fe5-8032-4ff3151847e0"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.459852 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/eaa2673b-dea0-4911-b2b8-a9f196d7175e-proxy-ca-bundles\") pod \"controller-manager-c77b6d4fb-sgg4h\" (UID: \"eaa2673b-dea0-4911-b2b8-a9f196d7175e\") " pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.459929 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eaa2673b-dea0-4911-b2b8-a9f196d7175e-serving-cert\") pod \"controller-manager-c77b6d4fb-sgg4h\" (UID: \"eaa2673b-dea0-4911-b2b8-a9f196d7175e\") " pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.459963 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggqn9\" (UniqueName: \"kubernetes.io/projected/eaa2673b-dea0-4911-b2b8-a9f196d7175e-kube-api-access-ggqn9\") pod \"controller-manager-c77b6d4fb-sgg4h\" (UID: \"eaa2673b-dea0-4911-b2b8-a9f196d7175e\") " pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.460020 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eaa2673b-dea0-4911-b2b8-a9f196d7175e-client-ca\") pod \"controller-manager-c77b6d4fb-sgg4h\" (UID: \"eaa2673b-dea0-4911-b2b8-a9f196d7175e\") " pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.460110 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaa2673b-dea0-4911-b2b8-a9f196d7175e-config\") pod \"controller-manager-c77b6d4fb-sgg4h\" (UID: \"eaa2673b-dea0-4911-b2b8-a9f196d7175e\") " pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.460186 4622 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-client-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.460206 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zj8ww\" (UniqueName: \"kubernetes.io/projected/db5aa452-2ca0-4fe5-8032-4ff3151847e0-kube-api-access-zj8ww\") on node 
\"crc\" DevicePath \"\"" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.460218 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.460226 4622 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db5aa452-2ca0-4fe5-8032-4ff3151847e0-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.460234 4622 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/db5aa452-2ca0-4fe5-8032-4ff3151847e0-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.461286 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/eaa2673b-dea0-4911-b2b8-a9f196d7175e-proxy-ca-bundles\") pod \"controller-manager-c77b6d4fb-sgg4h\" (UID: \"eaa2673b-dea0-4911-b2b8-a9f196d7175e\") " pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.461426 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaa2673b-dea0-4911-b2b8-a9f196d7175e-config\") pod \"controller-manager-c77b6d4fb-sgg4h\" (UID: \"eaa2673b-dea0-4911-b2b8-a9f196d7175e\") " pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.462053 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eaa2673b-dea0-4911-b2b8-a9f196d7175e-client-ca\") pod \"controller-manager-c77b6d4fb-sgg4h\" (UID: \"eaa2673b-dea0-4911-b2b8-a9f196d7175e\") " pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.465112 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eaa2673b-dea0-4911-b2b8-a9f196d7175e-serving-cert\") pod \"controller-manager-c77b6d4fb-sgg4h\" (UID: \"eaa2673b-dea0-4911-b2b8-a9f196d7175e\") " pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.475910 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggqn9\" (UniqueName: \"kubernetes.io/projected/eaa2673b-dea0-4911-b2b8-a9f196d7175e-kube-api-access-ggqn9\") pod \"controller-manager-c77b6d4fb-sgg4h\" (UID: \"eaa2673b-dea0-4911-b2b8-a9f196d7175e\") " pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:43 crc kubenswrapper[4622]: I1126 11:16:43.646959 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:44 crc kubenswrapper[4622]: I1126 11:16:44.018541 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h"] Nov 26 11:16:44 crc kubenswrapper[4622]: I1126 11:16:44.159659 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-56cd597557-k4z64" Nov 26 11:16:44 crc kubenswrapper[4622]: I1126 11:16:44.161905 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" event={"ID":"eaa2673b-dea0-4911-b2b8-a9f196d7175e","Type":"ContainerStarted","Data":"99cf497f94df774d2285be7175008e5cecdbf01fee52e712a648944cf389090b"} Nov 26 11:16:44 crc kubenswrapper[4622]: I1126 11:16:44.161954 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" event={"ID":"eaa2673b-dea0-4911-b2b8-a9f196d7175e","Type":"ContainerStarted","Data":"d233758eef1a02a8e5d747b1455764a99747a1546095b8cb3f402b760f495437"} Nov 26 11:16:44 crc kubenswrapper[4622]: I1126 11:16:44.162322 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:44 crc kubenswrapper[4622]: I1126 11:16:44.163763 4622 patch_prober.go:28] interesting pod/controller-manager-c77b6d4fb-sgg4h container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.68:8443/healthz\": dial tcp 10.217.0.68:8443: connect: connection refused" start-of-body= Nov 26 11:16:44 crc kubenswrapper[4622]: I1126 11:16:44.163821 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" podUID="eaa2673b-dea0-4911-b2b8-a9f196d7175e" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.68:8443/healthz\": dial tcp 10.217.0.68:8443: connect: connection refused" Nov 26 11:16:44 crc kubenswrapper[4622]: I1126 11:16:44.178895 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" podStartSLOduration=2.178872342 podStartE2EDuration="2.178872342s" podCreationTimestamp="2025-11-26 11:16:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:16:44.175565651 +0000 UTC m=+363.766777173" watchObservedRunningTime="2025-11-26 11:16:44.178872342 +0000 UTC m=+363.770083864" Nov 26 11:16:44 crc kubenswrapper[4622]: I1126 11:16:44.190767 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-56cd597557-k4z64"] Nov 26 11:16:44 crc kubenswrapper[4622]: I1126 11:16:44.195371 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-56cd597557-k4z64"] Nov 26 11:16:44 crc kubenswrapper[4622]: I1126 11:16:44.711379 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db5aa452-2ca0-4fe5-8032-4ff3151847e0" path="/var/lib/kubelet/pods/db5aa452-2ca0-4fe5-8032-4ff3151847e0/volumes" Nov 26 11:16:45 crc kubenswrapper[4622]: I1126 11:16:45.167346 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-c77b6d4fb-sgg4h" Nov 26 11:16:45 crc kubenswrapper[4622]: I1126 11:16:45.199010 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:16:45 crc 
kubenswrapper[4622]: I1126 11:16:45.199075 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:17:15 crc kubenswrapper[4622]: I1126 11:17:15.199446 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:17:15 crc kubenswrapper[4622]: I1126 11:17:15.200949 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:17:15 crc kubenswrapper[4622]: I1126 11:17:15.201095 4622 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:17:15 crc kubenswrapper[4622]: I1126 11:17:15.202591 4622 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f348cd6994d5058f0ac71d77992189f80cff2525f87b9aa4c209a37822c4a046"} pod="openshift-machine-config-operator/machine-config-daemon-k565w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 26 11:17:15 crc kubenswrapper[4622]: I1126 11:17:15.202639 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" containerID="cri-o://f348cd6994d5058f0ac71d77992189f80cff2525f87b9aa4c209a37822c4a046" gracePeriod=600 Nov 26 11:17:15 crc kubenswrapper[4622]: I1126 11:17:15.344211 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerID="f348cd6994d5058f0ac71d77992189f80cff2525f87b9aa4c209a37822c4a046" exitCode=0 Nov 26 11:17:15 crc kubenswrapper[4622]: I1126 11:17:15.344302 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerDied","Data":"f348cd6994d5058f0ac71d77992189f80cff2525f87b9aa4c209a37822c4a046"} Nov 26 11:17:15 crc kubenswrapper[4622]: I1126 11:17:15.344400 4622 scope.go:117] "RemoveContainer" containerID="11a13eea59e34f5bbfcac26302b6cd65713e0c136d28047e702d8c0f599b6fa4" Nov 26 11:17:16 crc kubenswrapper[4622]: I1126 11:17:16.352350 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"766a6366557cfa7283764d83904fccd9ef25d7f74833ab3b92a945f25d2ba5c1"} Nov 26 11:19:15 crc kubenswrapper[4622]: I1126 11:19:15.198575 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:19:15 crc kubenswrapper[4622]: I1126 11:19:15.199268 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:19:45 crc kubenswrapper[4622]: I1126 11:19:45.199254 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:19:45 crc kubenswrapper[4622]: I1126 11:19:45.199961 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:20:15 crc kubenswrapper[4622]: I1126 11:20:15.198703 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:20:15 crc kubenswrapper[4622]: I1126 11:20:15.199425 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:20:15 crc kubenswrapper[4622]: I1126 11:20:15.199494 4622 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:20:15 crc kubenswrapper[4622]: I1126 11:20:15.200187 4622 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"766a6366557cfa7283764d83904fccd9ef25d7f74833ab3b92a945f25d2ba5c1"} pod="openshift-machine-config-operator/machine-config-daemon-k565w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 26 11:20:15 crc kubenswrapper[4622]: I1126 11:20:15.200250 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" containerID="cri-o://766a6366557cfa7283764d83904fccd9ef25d7f74833ab3b92a945f25d2ba5c1" gracePeriod=600 Nov 26 11:20:16 crc kubenswrapper[4622]: I1126 11:20:16.260316 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerID="766a6366557cfa7283764d83904fccd9ef25d7f74833ab3b92a945f25d2ba5c1" exitCode=0 Nov 26 11:20:16 crc kubenswrapper[4622]: I1126 11:20:16.260374 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" 
event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerDied","Data":"766a6366557cfa7283764d83904fccd9ef25d7f74833ab3b92a945f25d2ba5c1"} Nov 26 11:20:16 crc kubenswrapper[4622]: I1126 11:20:16.261212 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"6dbfb41c933dbd29df57c403106270419f480c91a5742a0e6130afbb763abb32"} Nov 26 11:20:16 crc kubenswrapper[4622]: I1126 11:20:16.261263 4622 scope.go:117] "RemoveContainer" containerID="f348cd6994d5058f0ac71d77992189f80cff2525f87b9aa4c209a37822c4a046" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.111090 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-tcw8n"] Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.112227 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-tcw8n" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.114106 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.114218 4622 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-2kqzm" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.114324 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.117146 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-hv4z2"] Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.117917 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-hv4z2" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.119727 4622 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-t4npj" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.131976 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-fjmks"] Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.132705 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-fjmks" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.133891 4622 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-6cdlf" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.154921 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-fjmks"] Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.161176 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-tcw8n"] Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.170023 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-hv4z2"] Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.258487 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kb66m\" (UniqueName: \"kubernetes.io/projected/b4640717-d4ad-4231-aef4-273e00c48ec2-kube-api-access-kb66m\") pod \"cert-manager-webhook-5655c58dd6-fjmks\" (UID: \"b4640717-d4ad-4231-aef4-273e00c48ec2\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-fjmks" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.258796 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crknq\" (UniqueName: \"kubernetes.io/projected/c7ab3421-7274-4a71-b54a-dc4955028478-kube-api-access-crknq\") pod \"cert-manager-5b446d88c5-hv4z2\" (UID: \"c7ab3421-7274-4a71-b54a-dc4955028478\") " pod="cert-manager/cert-manager-5b446d88c5-hv4z2" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.258878 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p2vm\" (UniqueName: \"kubernetes.io/projected/34de43db-05eb-494e-a5a2-f0a4979dabb5-kube-api-access-7p2vm\") pod \"cert-manager-cainjector-7f985d654d-tcw8n\" (UID: \"34de43db-05eb-494e-a5a2-f0a4979dabb5\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-tcw8n" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.359569 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kb66m\" (UniqueName: \"kubernetes.io/projected/b4640717-d4ad-4231-aef4-273e00c48ec2-kube-api-access-kb66m\") pod \"cert-manager-webhook-5655c58dd6-fjmks\" (UID: \"b4640717-d4ad-4231-aef4-273e00c48ec2\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-fjmks" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.359678 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crknq\" (UniqueName: \"kubernetes.io/projected/c7ab3421-7274-4a71-b54a-dc4955028478-kube-api-access-crknq\") pod \"cert-manager-5b446d88c5-hv4z2\" (UID: \"c7ab3421-7274-4a71-b54a-dc4955028478\") " pod="cert-manager/cert-manager-5b446d88c5-hv4z2" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.359701 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p2vm\" (UniqueName: \"kubernetes.io/projected/34de43db-05eb-494e-a5a2-f0a4979dabb5-kube-api-access-7p2vm\") pod \"cert-manager-cainjector-7f985d654d-tcw8n\" (UID: \"34de43db-05eb-494e-a5a2-f0a4979dabb5\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-tcw8n" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.375466 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7p2vm\" (UniqueName: 
\"kubernetes.io/projected/34de43db-05eb-494e-a5a2-f0a4979dabb5-kube-api-access-7p2vm\") pod \"cert-manager-cainjector-7f985d654d-tcw8n\" (UID: \"34de43db-05eb-494e-a5a2-f0a4979dabb5\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-tcw8n" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.375464 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crknq\" (UniqueName: \"kubernetes.io/projected/c7ab3421-7274-4a71-b54a-dc4955028478-kube-api-access-crknq\") pod \"cert-manager-5b446d88c5-hv4z2\" (UID: \"c7ab3421-7274-4a71-b54a-dc4955028478\") " pod="cert-manager/cert-manager-5b446d88c5-hv4z2" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.375788 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kb66m\" (UniqueName: \"kubernetes.io/projected/b4640717-d4ad-4231-aef4-273e00c48ec2-kube-api-access-kb66m\") pod \"cert-manager-webhook-5655c58dd6-fjmks\" (UID: \"b4640717-d4ad-4231-aef4-273e00c48ec2\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-fjmks" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.426676 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-tcw8n" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.432802 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-hv4z2" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.442726 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-fjmks" Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.795151 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-hv4z2"] Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.801180 4622 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.822768 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-fjmks"] Nov 26 11:20:27 crc kubenswrapper[4622]: I1126 11:20:27.825908 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-tcw8n"] Nov 26 11:20:27 crc kubenswrapper[4622]: W1126 11:20:27.826122 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb4640717_d4ad_4231_aef4_273e00c48ec2.slice/crio-a53f30b4f2999e0573a84570ee44075c5d33e9cc163a8790b65e6291613719e5 WatchSource:0}: Error finding container a53f30b4f2999e0573a84570ee44075c5d33e9cc163a8790b65e6291613719e5: Status 404 returned error can't find the container with id a53f30b4f2999e0573a84570ee44075c5d33e9cc163a8790b65e6291613719e5 Nov 26 11:20:27 crc kubenswrapper[4622]: W1126 11:20:27.829560 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34de43db_05eb_494e_a5a2_f0a4979dabb5.slice/crio-f3fcdbad3810a29d630b50d0723eda5126f5c5b61269da868124a136480d3b8a WatchSource:0}: Error finding container f3fcdbad3810a29d630b50d0723eda5126f5c5b61269da868124a136480d3b8a: Status 404 returned error can't find the container with id f3fcdbad3810a29d630b50d0723eda5126f5c5b61269da868124a136480d3b8a Nov 26 11:20:28 crc kubenswrapper[4622]: I1126 11:20:28.329460 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="cert-manager/cert-manager-webhook-5655c58dd6-fjmks" event={"ID":"b4640717-d4ad-4231-aef4-273e00c48ec2","Type":"ContainerStarted","Data":"a53f30b4f2999e0573a84570ee44075c5d33e9cc163a8790b65e6291613719e5"} Nov 26 11:20:28 crc kubenswrapper[4622]: I1126 11:20:28.330336 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-hv4z2" event={"ID":"c7ab3421-7274-4a71-b54a-dc4955028478","Type":"ContainerStarted","Data":"00197ffd17698b12233d85c03eea1abd5e74ce7770ba904134bb683848ef75ef"} Nov 26 11:20:28 crc kubenswrapper[4622]: I1126 11:20:28.331287 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-tcw8n" event={"ID":"34de43db-05eb-494e-a5a2-f0a4979dabb5","Type":"ContainerStarted","Data":"f3fcdbad3810a29d630b50d0723eda5126f5c5b61269da868124a136480d3b8a"} Nov 26 11:20:30 crc kubenswrapper[4622]: I1126 11:20:30.342875 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-hv4z2" event={"ID":"c7ab3421-7274-4a71-b54a-dc4955028478","Type":"ContainerStarted","Data":"b71a690a85cc35a05452151046a357ac92ad50e81196bf0abe04e450ba34f84a"} Nov 26 11:20:30 crc kubenswrapper[4622]: I1126 11:20:30.354287 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-hv4z2" podStartSLOduration=1.41169132 podStartE2EDuration="3.354269956s" podCreationTimestamp="2025-11-26 11:20:27 +0000 UTC" firstStartedPulling="2025-11-26 11:20:27.80096321 +0000 UTC m=+587.392174732" lastFinishedPulling="2025-11-26 11:20:29.743541846 +0000 UTC m=+589.334753368" observedRunningTime="2025-11-26 11:20:30.353434991 +0000 UTC m=+589.944646533" watchObservedRunningTime="2025-11-26 11:20:30.354269956 +0000 UTC m=+589.945481479" Nov 26 11:20:31 crc kubenswrapper[4622]: I1126 11:20:31.348981 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-fjmks" event={"ID":"b4640717-d4ad-4231-aef4-273e00c48ec2","Type":"ContainerStarted","Data":"3685d4e83ac8a3f9a6ba785eed2c90fc0c47ac0bb99d7f242c8fd8767d4c5543"} Nov 26 11:20:31 crc kubenswrapper[4622]: I1126 11:20:31.349123 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-fjmks" Nov 26 11:20:31 crc kubenswrapper[4622]: I1126 11:20:31.350473 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-tcw8n" event={"ID":"34de43db-05eb-494e-a5a2-f0a4979dabb5","Type":"ContainerStarted","Data":"a26b28eaa3dfdcc3b2b9f8180399bd0d0c78de94ce8fce9342499324b6ee3ccc"} Nov 26 11:20:31 crc kubenswrapper[4622]: I1126 11:20:31.361159 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-fjmks" podStartSLOduration=1.5771053350000002 podStartE2EDuration="4.361143173s" podCreationTimestamp="2025-11-26 11:20:27 +0000 UTC" firstStartedPulling="2025-11-26 11:20:27.828085189 +0000 UTC m=+587.419296711" lastFinishedPulling="2025-11-26 11:20:30.612123027 +0000 UTC m=+590.203334549" observedRunningTime="2025-11-26 11:20:31.359096591 +0000 UTC m=+590.950308113" watchObservedRunningTime="2025-11-26 11:20:31.361143173 +0000 UTC m=+590.952354695" Nov 26 11:20:31 crc kubenswrapper[4622]: I1126 11:20:31.372960 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-tcw8n" podStartSLOduration=1.5912541789999999 
podStartE2EDuration="4.37294037s" podCreationTimestamp="2025-11-26 11:20:27 +0000 UTC" firstStartedPulling="2025-11-26 11:20:27.831298651 +0000 UTC m=+587.422510174" lastFinishedPulling="2025-11-26 11:20:30.612984843 +0000 UTC m=+590.204196365" observedRunningTime="2025-11-26 11:20:31.371534648 +0000 UTC m=+590.962746170" watchObservedRunningTime="2025-11-26 11:20:31.37294037 +0000 UTC m=+590.964151881" Nov 26 11:20:37 crc kubenswrapper[4622]: I1126 11:20:37.445713 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-fjmks" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.651119 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-qx5dc"] Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.651480 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovn-controller" containerID="cri-o://88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121" gracePeriod=30 Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.651605 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="kube-rbac-proxy-node" containerID="cri-o://322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c" gracePeriod=30 Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.651622 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="northd" containerID="cri-o://5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818" gracePeriod=30 Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.651614 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea" gracePeriod=30 Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.651652 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovn-acl-logging" containerID="cri-o://9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d" gracePeriod=30 Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.651782 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="nbdb" containerID="cri-o://46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05" gracePeriod=30 Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.651738 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="sbdb" containerID="cri-o://696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835" gracePeriod=30 Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.676639 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" 
podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" containerID="cri-o://651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747" gracePeriod=30 Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.896688 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovnkube-controller/3.log" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.899282 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovn-acl-logging/0.log" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.901230 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovn-controller/0.log" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.901740 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.947169 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-cxh54"] Nov 26 11:20:38 crc kubenswrapper[4622]: E1126 11:20:38.947614 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="nbdb" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.947711 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="nbdb" Nov 26 11:20:38 crc kubenswrapper[4622]: E1126 11:20:38.947790 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.947843 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: E1126 11:20:38.947891 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="kubecfg-setup" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.947952 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="kubecfg-setup" Nov 26 11:20:38 crc kubenswrapper[4622]: E1126 11:20:38.948009 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="kube-rbac-proxy-node" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.948059 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="kube-rbac-proxy-node" Nov 26 11:20:38 crc kubenswrapper[4622]: E1126 11:20:38.948106 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.948147 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: E1126 11:20:38.948193 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="northd" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.948234 4622 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="northd" Nov 26 11:20:38 crc kubenswrapper[4622]: E1126 11:20:38.948276 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="kube-rbac-proxy-ovn-metrics" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.948315 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="kube-rbac-proxy-ovn-metrics" Nov 26 11:20:38 crc kubenswrapper[4622]: E1126 11:20:38.948364 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="sbdb" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.948406 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="sbdb" Nov 26 11:20:38 crc kubenswrapper[4622]: E1126 11:20:38.948453 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.948516 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: E1126 11:20:38.948573 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovn-acl-logging" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.948616 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovn-acl-logging" Nov 26 11:20:38 crc kubenswrapper[4622]: E1126 11:20:38.948662 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovn-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.948707 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovn-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.948861 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.948917 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="kube-rbac-proxy-ovn-metrics" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.948961 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="sbdb" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.949004 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.949046 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovn-acl-logging" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.949162 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="nbdb" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.949211 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.949252 4622 
memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.949298 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="kube-rbac-proxy-node" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.949347 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovn-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.949394 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="northd" Nov 26 11:20:38 crc kubenswrapper[4622]: E1126 11:20:38.949549 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.949609 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: E1126 11:20:38.949660 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.949704 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.949842 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerName="ovnkube-controller" Nov 26 11:20:38 crc kubenswrapper[4622]: I1126 11:20:38.951295 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014443 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-log-socket\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014484 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-run-ovn-kubernetes\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014529 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-env-overrides\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014561 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-var-lib-openvswitch\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014576 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-node-log\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014579 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-log-socket" (OuterVolumeSpecName: "log-socket") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014604 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-ovn\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014598 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014626 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014621 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-systemd-units\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014642 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014657 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-node-log" (OuterVolumeSpecName: "node-log") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014670 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014708 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovnkube-script-lib\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014777 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-etc-openvswitch\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014800 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-systemd\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014828 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovn-node-metrics-cert\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014849 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovnkube-config\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014867 4622 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49744\" (UniqueName: \"kubernetes.io/projected/9cf9b509-1f95-4119-a348-92cba5fc8bb9-kube-api-access-49744\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014892 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-slash\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014911 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-run-netns\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014931 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-cni-bin\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014966 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.014998 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-kubelet\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015023 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-cni-netd\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015039 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-openvswitch\") pod \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\" (UID: \"9cf9b509-1f95-4119-a348-92cba5fc8bb9\") " Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015043 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015078 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-slash" (OuterVolumeSpecName: "host-slash") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015100 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015112 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015167 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015195 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015214 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015228 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015485 4622 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-slash\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015535 4622 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015524 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015575 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015549 4622 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-kubelet\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015627 4622 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-cni-netd\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015641 4622 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015652 4622 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015664 4622 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-log-socket\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015675 4622 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015684 4622 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015696 4622 reconciler_common.go:293] "Volume detached for volume \"node-log\" 
(UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-node-log\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015705 4622 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015716 4622 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-systemd-units\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015731 4622 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015740 4622 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.015984 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.020387 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cf9b509-1f95-4119-a348-92cba5fc8bb9-kube-api-access-49744" (OuterVolumeSpecName: "kube-api-access-49744") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "kube-api-access-49744". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.020399 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.026491 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "9cf9b509-1f95-4119-a348-92cba5fc8bb9" (UID: "9cf9b509-1f95-4119-a348-92cba5fc8bb9"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.116699 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-cni-netd\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.116786 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-run-ovn-kubernetes\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.116955 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-run-netns\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117010 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-run-systemd\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117080 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117159 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-etc-openvswitch\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117216 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-run-openvswitch\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117251 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-systemd-units\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117269 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/b070621f-8e22-4852-bd89-8a4eddf74b23-env-overrides\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117324 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-slash\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117358 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-run-ovn\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117391 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-log-socket\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117417 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b070621f-8e22-4852-bd89-8a4eddf74b23-ovnkube-config\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117443 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b070621f-8e22-4852-bd89-8a4eddf74b23-ovnkube-script-lib\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117468 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-var-lib-openvswitch\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117520 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxl5f\" (UniqueName: \"kubernetes.io/projected/b070621f-8e22-4852-bd89-8a4eddf74b23-kube-api-access-hxl5f\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117549 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-kubelet\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117574 4622 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b070621f-8e22-4852-bd89-8a4eddf74b23-ovn-node-metrics-cert\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117616 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-node-log\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117644 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-cni-bin\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117730 4622 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-run-systemd\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117749 4622 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117761 4622 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9cf9b509-1f95-4119-a348-92cba5fc8bb9-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117788 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49744\" (UniqueName: \"kubernetes.io/projected/9cf9b509-1f95-4119-a348-92cba5fc8bb9-kube-api-access-49744\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117798 4622 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-run-netns\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.117808 4622 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9cf9b509-1f95-4119-a348-92cba5fc8bb9-host-cni-bin\") on node \"crc\" DevicePath \"\"" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219110 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b070621f-8e22-4852-bd89-8a4eddf74b23-ovn-node-metrics-cert\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219158 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-node-log\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 
11:20:39.219176 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-cni-bin\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219204 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-cni-netd\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219225 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-run-ovn-kubernetes\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219248 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-run-netns\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219262 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-run-systemd\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219296 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219298 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-cni-bin\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219319 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-etc-openvswitch\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219331 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-cni-netd\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219346 4622 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-etc-openvswitch\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219353 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-run-openvswitch\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219300 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-node-log\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219376 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219383 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-systemd-units\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219376 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-run-ovn-kubernetes\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219418 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-systemd-units\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219403 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-run-openvswitch\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219425 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-run-systemd\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219407 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-run-netns\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219398 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b070621f-8e22-4852-bd89-8a4eddf74b23-env-overrides\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219585 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-slash\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219621 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-run-ovn\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219643 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-log-socket\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219667 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b070621f-8e22-4852-bd89-8a4eddf74b23-ovnkube-config\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219687 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b070621f-8e22-4852-bd89-8a4eddf74b23-ovnkube-script-lib\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219693 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-log-socket\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219702 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-var-lib-openvswitch\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219692 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-slash\") pod \"ovnkube-node-cxh54\" (UID: 
\"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219699 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-run-ovn\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219726 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxl5f\" (UniqueName: \"kubernetes.io/projected/b070621f-8e22-4852-bd89-8a4eddf74b23-kube-api-access-hxl5f\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219759 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-kubelet\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219756 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-var-lib-openvswitch\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.219820 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b070621f-8e22-4852-bd89-8a4eddf74b23-host-kubelet\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.220038 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b070621f-8e22-4852-bd89-8a4eddf74b23-env-overrides\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.220244 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b070621f-8e22-4852-bd89-8a4eddf74b23-ovnkube-script-lib\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.220317 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b070621f-8e22-4852-bd89-8a4eddf74b23-ovnkube-config\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.222618 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b070621f-8e22-4852-bd89-8a4eddf74b23-ovn-node-metrics-cert\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: 
I1126 11:20:39.233748 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxl5f\" (UniqueName: \"kubernetes.io/projected/b070621f-8e22-4852-bd89-8a4eddf74b23-kube-api-access-hxl5f\") pod \"ovnkube-node-cxh54\" (UID: \"b070621f-8e22-4852-bd89-8a4eddf74b23\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.263490 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.395419 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovnkube-controller/3.log" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.398201 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovn-acl-logging/0.log" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.398891 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-qx5dc_9cf9b509-1f95-4119-a348-92cba5fc8bb9/ovn-controller/0.log" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399300 4622 generic.go:334] "Generic (PLEG): container finished" podID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerID="651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747" exitCode=0 Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399328 4622 generic.go:334] "Generic (PLEG): container finished" podID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerID="696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835" exitCode=0 Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399337 4622 generic.go:334] "Generic (PLEG): container finished" podID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerID="46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05" exitCode=0 Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399344 4622 generic.go:334] "Generic (PLEG): container finished" podID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerID="5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818" exitCode=0 Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399354 4622 generic.go:334] "Generic (PLEG): container finished" podID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerID="04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea" exitCode=0 Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399362 4622 generic.go:334] "Generic (PLEG): container finished" podID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerID="322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c" exitCode=0 Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399369 4622 generic.go:334] "Generic (PLEG): container finished" podID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerID="9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d" exitCode=143 Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399377 4622 generic.go:334] "Generic (PLEG): container finished" podID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" containerID="88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121" exitCode=143 Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399370 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" 
event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerDied","Data":"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399391 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399417 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerDied","Data":"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399432 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerDied","Data":"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399443 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerDied","Data":"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399453 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerDied","Data":"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399465 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerDied","Data":"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399479 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399492 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399515 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399522 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399527 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399529 4622 scope.go:117] "RemoveContainer" containerID="651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399532 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399615 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399623 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399630 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399642 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerDied","Data":"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399653 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399660 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399666 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399671 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399678 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399683 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399690 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399694 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399699 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399705 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399712 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerDied","Data":"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399720 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399728 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399733 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399738 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399743 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399747 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399752 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399756 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399760 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399774 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399781 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qx5dc" event={"ID":"9cf9b509-1f95-4119-a348-92cba5fc8bb9","Type":"ContainerDied","Data":"52ed3b80dc23320df26ea32271abb5917b693e789c175fd038a2489b30471874"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399790 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399796 4622 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399801 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399806 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399811 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399815 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399820 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399824 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399829 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.399834 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.401314 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vmw42_fc4efcee-b872-406d-a694-3572222a8dfc/kube-multus/2.log" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.402106 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vmw42_fc4efcee-b872-406d-a694-3572222a8dfc/kube-multus/1.log" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.402240 4622 generic.go:334] "Generic (PLEG): container finished" podID="fc4efcee-b872-406d-a694-3572222a8dfc" containerID="00579d8535f5e85e068296e9b3cdd883e2e3800c771b963c3ee492fac0bcdd43" exitCode=2 Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.402334 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vmw42" event={"ID":"fc4efcee-b872-406d-a694-3572222a8dfc","Type":"ContainerDied","Data":"00579d8535f5e85e068296e9b3cdd883e2e3800c771b963c3ee492fac0bcdd43"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.402388 4622 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"56dc4e957dfc7b5545751f3cb685d899d2cb1aec53d04cdca0c651a03815f3a0"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.403036 4622 scope.go:117] "RemoveContainer" 
containerID="00579d8535f5e85e068296e9b3cdd883e2e3800c771b963c3ee492fac0bcdd43" Nov 26 11:20:39 crc kubenswrapper[4622]: E1126 11:20:39.404173 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-vmw42_openshift-multus(fc4efcee-b872-406d-a694-3572222a8dfc)\"" pod="openshift-multus/multus-vmw42" podUID="fc4efcee-b872-406d-a694-3572222a8dfc" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.408056 4622 generic.go:334] "Generic (PLEG): container finished" podID="b070621f-8e22-4852-bd89-8a4eddf74b23" containerID="8d0131d3c55a97c00f07498c222faa9821567df03efb7bd53a32be820a9cbf2e" exitCode=0 Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.408149 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" event={"ID":"b070621f-8e22-4852-bd89-8a4eddf74b23","Type":"ContainerDied","Data":"8d0131d3c55a97c00f07498c222faa9821567df03efb7bd53a32be820a9cbf2e"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.408236 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" event={"ID":"b070621f-8e22-4852-bd89-8a4eddf74b23","Type":"ContainerStarted","Data":"4884df9fb04aced668e7f06ae1bec5a32f9bb044c39bcadeb9023ad42d35da1c"} Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.449539 4622 scope.go:117] "RemoveContainer" containerID="48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.464717 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-qx5dc"] Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.469704 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-qx5dc"] Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.475543 4622 scope.go:117] "RemoveContainer" containerID="696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.488328 4622 scope.go:117] "RemoveContainer" containerID="46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.503167 4622 scope.go:117] "RemoveContainer" containerID="5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.514109 4622 scope.go:117] "RemoveContainer" containerID="04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.527598 4622 scope.go:117] "RemoveContainer" containerID="322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.539191 4622 scope.go:117] "RemoveContainer" containerID="9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.563195 4622 scope.go:117] "RemoveContainer" containerID="88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.578258 4622 scope.go:117] "RemoveContainer" containerID="93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.594935 4622 scope.go:117] "RemoveContainer" containerID="651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747" Nov 26 11:20:39 crc kubenswrapper[4622]: E1126 
11:20:39.595192 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747\": container with ID starting with 651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747 not found: ID does not exist" containerID="651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.595232 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747"} err="failed to get container status \"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747\": rpc error: code = NotFound desc = could not find container \"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747\": container with ID starting with 651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.595256 4622 scope.go:117] "RemoveContainer" containerID="48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818" Nov 26 11:20:39 crc kubenswrapper[4622]: E1126 11:20:39.595678 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818\": container with ID starting with 48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818 not found: ID does not exist" containerID="48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.595703 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818"} err="failed to get container status \"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818\": rpc error: code = NotFound desc = could not find container \"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818\": container with ID starting with 48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.595720 4622 scope.go:117] "RemoveContainer" containerID="696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835" Nov 26 11:20:39 crc kubenswrapper[4622]: E1126 11:20:39.596005 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\": container with ID starting with 696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835 not found: ID does not exist" containerID="696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.596029 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835"} err="failed to get container status \"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\": rpc error: code = NotFound desc = could not find container \"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\": container with ID starting with 696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.596043 4622 
scope.go:117] "RemoveContainer" containerID="46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05" Nov 26 11:20:39 crc kubenswrapper[4622]: E1126 11:20:39.596253 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\": container with ID starting with 46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05 not found: ID does not exist" containerID="46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.596274 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05"} err="failed to get container status \"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\": rpc error: code = NotFound desc = could not find container \"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\": container with ID starting with 46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.596288 4622 scope.go:117] "RemoveContainer" containerID="5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818" Nov 26 11:20:39 crc kubenswrapper[4622]: E1126 11:20:39.596460 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\": container with ID starting with 5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818 not found: ID does not exist" containerID="5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.596483 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818"} err="failed to get container status \"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\": rpc error: code = NotFound desc = could not find container \"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\": container with ID starting with 5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.596497 4622 scope.go:117] "RemoveContainer" containerID="04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea" Nov 26 11:20:39 crc kubenswrapper[4622]: E1126 11:20:39.596776 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\": container with ID starting with 04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea not found: ID does not exist" containerID="04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.596796 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea"} err="failed to get container status \"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\": rpc error: code = NotFound desc = could not find container \"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\": container with ID starting with 
04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.596809 4622 scope.go:117] "RemoveContainer" containerID="322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c" Nov 26 11:20:39 crc kubenswrapper[4622]: E1126 11:20:39.597456 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\": container with ID starting with 322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c not found: ID does not exist" containerID="322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.597482 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c"} err="failed to get container status \"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\": rpc error: code = NotFound desc = could not find container \"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\": container with ID starting with 322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.597494 4622 scope.go:117] "RemoveContainer" containerID="9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d" Nov 26 11:20:39 crc kubenswrapper[4622]: E1126 11:20:39.598010 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\": container with ID starting with 9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d not found: ID does not exist" containerID="9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.598034 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d"} err="failed to get container status \"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\": rpc error: code = NotFound desc = could not find container \"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\": container with ID starting with 9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.598046 4622 scope.go:117] "RemoveContainer" containerID="88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121" Nov 26 11:20:39 crc kubenswrapper[4622]: E1126 11:20:39.598406 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\": container with ID starting with 88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121 not found: ID does not exist" containerID="88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.598452 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121"} err="failed to get container status \"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\": rpc 
error: code = NotFound desc = could not find container \"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\": container with ID starting with 88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.598487 4622 scope.go:117] "RemoveContainer" containerID="93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf" Nov 26 11:20:39 crc kubenswrapper[4622]: E1126 11:20:39.598997 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\": container with ID starting with 93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf not found: ID does not exist" containerID="93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.599053 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf"} err="failed to get container status \"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\": rpc error: code = NotFound desc = could not find container \"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\": container with ID starting with 93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.599068 4622 scope.go:117] "RemoveContainer" containerID="651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.600083 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747"} err="failed to get container status \"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747\": rpc error: code = NotFound desc = could not find container \"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747\": container with ID starting with 651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.600098 4622 scope.go:117] "RemoveContainer" containerID="48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.600312 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818"} err="failed to get container status \"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818\": rpc error: code = NotFound desc = could not find container \"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818\": container with ID starting with 48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.600335 4622 scope.go:117] "RemoveContainer" containerID="696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.600661 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835"} err="failed to get container status \"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\": rpc 
error: code = NotFound desc = could not find container \"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\": container with ID starting with 696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.600732 4622 scope.go:117] "RemoveContainer" containerID="46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.601242 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05"} err="failed to get container status \"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\": rpc error: code = NotFound desc = could not find container \"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\": container with ID starting with 46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.601262 4622 scope.go:117] "RemoveContainer" containerID="5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.601555 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818"} err="failed to get container status \"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\": rpc error: code = NotFound desc = could not find container \"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\": container with ID starting with 5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.601571 4622 scope.go:117] "RemoveContainer" containerID="04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.601789 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea"} err="failed to get container status \"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\": rpc error: code = NotFound desc = could not find container \"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\": container with ID starting with 04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.601809 4622 scope.go:117] "RemoveContainer" containerID="322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.602041 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c"} err="failed to get container status \"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\": rpc error: code = NotFound desc = could not find container \"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\": container with ID starting with 322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.602063 4622 scope.go:117] "RemoveContainer" containerID="9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d" Nov 26 11:20:39 crc 
kubenswrapper[4622]: I1126 11:20:39.602848 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d"} err="failed to get container status \"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\": rpc error: code = NotFound desc = could not find container \"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\": container with ID starting with 9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.602870 4622 scope.go:117] "RemoveContainer" containerID="88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.603319 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121"} err="failed to get container status \"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\": rpc error: code = NotFound desc = could not find container \"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\": container with ID starting with 88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.603339 4622 scope.go:117] "RemoveContainer" containerID="93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.603684 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf"} err="failed to get container status \"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\": rpc error: code = NotFound desc = could not find container \"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\": container with ID starting with 93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.603704 4622 scope.go:117] "RemoveContainer" containerID="651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.604166 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747"} err="failed to get container status \"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747\": rpc error: code = NotFound desc = could not find container \"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747\": container with ID starting with 651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.604207 4622 scope.go:117] "RemoveContainer" containerID="48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.604558 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818"} err="failed to get container status \"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818\": rpc error: code = NotFound desc = could not find container \"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818\": container with ID 
starting with 48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.604582 4622 scope.go:117] "RemoveContainer" containerID="696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.604895 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835"} err="failed to get container status \"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\": rpc error: code = NotFound desc = could not find container \"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\": container with ID starting with 696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.604943 4622 scope.go:117] "RemoveContainer" containerID="46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.605353 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05"} err="failed to get container status \"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\": rpc error: code = NotFound desc = could not find container \"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\": container with ID starting with 46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.605375 4622 scope.go:117] "RemoveContainer" containerID="5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.605716 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818"} err="failed to get container status \"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\": rpc error: code = NotFound desc = could not find container \"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\": container with ID starting with 5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.605736 4622 scope.go:117] "RemoveContainer" containerID="04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.606027 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea"} err="failed to get container status \"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\": rpc error: code = NotFound desc = could not find container \"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\": container with ID starting with 04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.606055 4622 scope.go:117] "RemoveContainer" containerID="322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.606368 4622 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c"} err="failed to get container status \"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\": rpc error: code = NotFound desc = could not find container \"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\": container with ID starting with 322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.606391 4622 scope.go:117] "RemoveContainer" containerID="9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.606635 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d"} err="failed to get container status \"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\": rpc error: code = NotFound desc = could not find container \"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\": container with ID starting with 9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.606659 4622 scope.go:117] "RemoveContainer" containerID="88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.607217 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121"} err="failed to get container status \"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\": rpc error: code = NotFound desc = could not find container \"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\": container with ID starting with 88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.607251 4622 scope.go:117] "RemoveContainer" containerID="93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.607792 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf"} err="failed to get container status \"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\": rpc error: code = NotFound desc = could not find container \"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\": container with ID starting with 93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.607819 4622 scope.go:117] "RemoveContainer" containerID="651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.608081 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747"} err="failed to get container status \"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747\": rpc error: code = NotFound desc = could not find container \"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747\": container with ID starting with 651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747 not found: ID does not exist" Nov 
26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.608103 4622 scope.go:117] "RemoveContainer" containerID="48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.608479 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818"} err="failed to get container status \"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818\": rpc error: code = NotFound desc = could not find container \"48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818\": container with ID starting with 48a1ac35e79b440c5017b730b338cf4de365eba673ab7b525e0b6694acb3c818 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.608516 4622 scope.go:117] "RemoveContainer" containerID="696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.608825 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835"} err="failed to get container status \"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\": rpc error: code = NotFound desc = could not find container \"696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835\": container with ID starting with 696734e5d773b63786c3c1461a3720b2ed0da22bfb4c9a44f961c09b62832835 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.608848 4622 scope.go:117] "RemoveContainer" containerID="46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.609219 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05"} err="failed to get container status \"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\": rpc error: code = NotFound desc = could not find container \"46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05\": container with ID starting with 46c62537cb46fd27f6cfdb34938031c5bcdf53fb9ac82d84096e181863f00e05 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.609240 4622 scope.go:117] "RemoveContainer" containerID="5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.609633 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818"} err="failed to get container status \"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\": rpc error: code = NotFound desc = could not find container \"5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818\": container with ID starting with 5c712ff440559aa2fd7a6cccad97f4cd674c6e3d7de96d75da3f032680ef7818 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.609654 4622 scope.go:117] "RemoveContainer" containerID="04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.609904 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea"} err="failed to get container status 
\"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\": rpc error: code = NotFound desc = could not find container \"04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea\": container with ID starting with 04902091b18c50c6844a7688bd95cf6145eb516c61cb63adc2ff28179f9a17ea not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.609925 4622 scope.go:117] "RemoveContainer" containerID="322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.612182 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c"} err="failed to get container status \"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\": rpc error: code = NotFound desc = could not find container \"322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c\": container with ID starting with 322b8f2c66077702d617d5934233f006f758c9f0f6699662d4cd2d9612523a2c not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.612205 4622 scope.go:117] "RemoveContainer" containerID="9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.612699 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d"} err="failed to get container status \"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\": rpc error: code = NotFound desc = could not find container \"9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d\": container with ID starting with 9c2413084ea9ff9a229b488575f84b8e7d63664f8bc3b599c45c10af6141b01d not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.612720 4622 scope.go:117] "RemoveContainer" containerID="88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.613078 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121"} err="failed to get container status \"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\": rpc error: code = NotFound desc = could not find container \"88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121\": container with ID starting with 88f6a109b929eb804be92d2fd4e6bc298f11d307c2b7c8102cf2140c0ee9b121 not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.613099 4622 scope.go:117] "RemoveContainer" containerID="93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.613338 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf"} err="failed to get container status \"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\": rpc error: code = NotFound desc = could not find container \"93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf\": container with ID starting with 93a2bb8005cf1c73df7aa776cdf276d48fdb89b880c58deac4dbff087167a4bf not found: ID does not exist" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.613364 4622 scope.go:117] "RemoveContainer" 
containerID="651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747" Nov 26 11:20:39 crc kubenswrapper[4622]: I1126 11:20:39.613626 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747"} err="failed to get container status \"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747\": rpc error: code = NotFound desc = could not find container \"651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747\": container with ID starting with 651580059f1e834c30c5344cc7d35dfb51bd823e76a1cdd2872cc356ec181747 not found: ID does not exist" Nov 26 11:20:40 crc kubenswrapper[4622]: I1126 11:20:40.417347 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" event={"ID":"b070621f-8e22-4852-bd89-8a4eddf74b23","Type":"ContainerStarted","Data":"1b4be85e2597c4f40ec1495c9d53ebe0fdd1c1d2c8be9088157fa4026e657979"} Nov 26 11:20:40 crc kubenswrapper[4622]: I1126 11:20:40.417645 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" event={"ID":"b070621f-8e22-4852-bd89-8a4eddf74b23","Type":"ContainerStarted","Data":"33d9c3c3e61bafeb7b618c49cc4d59ea9275e19ce40f94abd70d757863f4d805"} Nov 26 11:20:40 crc kubenswrapper[4622]: I1126 11:20:40.417657 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" event={"ID":"b070621f-8e22-4852-bd89-8a4eddf74b23","Type":"ContainerStarted","Data":"c938a3e4f0cef71be8771f0e1768ae6fe0a0cf2d60a30b1cf755aa5f196cb244"} Nov 26 11:20:40 crc kubenswrapper[4622]: I1126 11:20:40.417665 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" event={"ID":"b070621f-8e22-4852-bd89-8a4eddf74b23","Type":"ContainerStarted","Data":"b30734d03090c0d59340310e35572a88b378c7969c76220d7dbe52d859205d33"} Nov 26 11:20:40 crc kubenswrapper[4622]: I1126 11:20:40.417673 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" event={"ID":"b070621f-8e22-4852-bd89-8a4eddf74b23","Type":"ContainerStarted","Data":"bbdcb13b10790b61ffefcb327c6557da527b78adc76c00364f9821c8ef98b34e"} Nov 26 11:20:40 crc kubenswrapper[4622]: I1126 11:20:40.417681 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" event={"ID":"b070621f-8e22-4852-bd89-8a4eddf74b23","Type":"ContainerStarted","Data":"50f0a7bf6939d1cf025817c8515719a4ae5b4436c7ebb5b9f2422e13c9ac6e9d"} Nov 26 11:20:40 crc kubenswrapper[4622]: I1126 11:20:40.715019 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cf9b509-1f95-4119-a348-92cba5fc8bb9" path="/var/lib/kubelet/pods/9cf9b509-1f95-4119-a348-92cba5fc8bb9/volumes" Nov 26 11:20:40 crc kubenswrapper[4622]: I1126 11:20:40.860341 4622 scope.go:117] "RemoveContainer" containerID="56dc4e957dfc7b5545751f3cb685d899d2cb1aec53d04cdca0c651a03815f3a0" Nov 26 11:20:41 crc kubenswrapper[4622]: I1126 11:20:41.424690 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vmw42_fc4efcee-b872-406d-a694-3572222a8dfc/kube-multus/2.log" Nov 26 11:20:42 crc kubenswrapper[4622]: I1126 11:20:42.434006 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" event={"ID":"b070621f-8e22-4852-bd89-8a4eddf74b23","Type":"ContainerStarted","Data":"c02da33853c94d1c90607f796fb2a220377e621a907e5cb58b716b3c9a23aeb7"} 
Nov 26 11:20:44 crc kubenswrapper[4622]: I1126 11:20:44.449983 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" event={"ID":"b070621f-8e22-4852-bd89-8a4eddf74b23","Type":"ContainerStarted","Data":"5d196cfc344519e2f5569fd0ea372f71e2e3507eb131b387a23835fff2ea5258"}
Nov 26 11:20:44 crc kubenswrapper[4622]: I1126 11:20:44.450344 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54"
Nov 26 11:20:44 crc kubenswrapper[4622]: I1126 11:20:44.450363 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54"
Nov 26 11:20:44 crc kubenswrapper[4622]: I1126 11:20:44.477803 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54"
Nov 26 11:20:44 crc kubenswrapper[4622]: I1126 11:20:44.480717 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54" podStartSLOduration=6.480697421 podStartE2EDuration="6.480697421s" podCreationTimestamp="2025-11-26 11:20:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:20:44.477572987 +0000 UTC m=+604.068784509" watchObservedRunningTime="2025-11-26 11:20:44.480697421 +0000 UTC m=+604.071908943"
Nov 26 11:20:45 crc kubenswrapper[4622]: I1126 11:20:45.454293 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54"
Nov 26 11:20:45 crc kubenswrapper[4622]: I1126 11:20:45.481656 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54"
Nov 26 11:20:53 crc kubenswrapper[4622]: I1126 11:20:53.705876 4622 scope.go:117] "RemoveContainer" containerID="00579d8535f5e85e068296e9b3cdd883e2e3800c771b963c3ee492fac0bcdd43"
Nov 26 11:20:53 crc kubenswrapper[4622]: E1126 11:20:53.706993 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-vmw42_openshift-multus(fc4efcee-b872-406d-a694-3572222a8dfc)\"" pod="openshift-multus/multus-vmw42" podUID="fc4efcee-b872-406d-a694-3572222a8dfc"
Nov 26 11:21:05 crc kubenswrapper[4622]: I1126 11:21:05.705882 4622 scope.go:117] "RemoveContainer" containerID="00579d8535f5e85e068296e9b3cdd883e2e3800c771b963c3ee492fac0bcdd43"
Nov 26 11:21:06 crc kubenswrapper[4622]: I1126 11:21:06.563142 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vmw42_fc4efcee-b872-406d-a694-3572222a8dfc/kube-multus/2.log"
Nov 26 11:21:06 crc kubenswrapper[4622]: I1126 11:21:06.563400 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vmw42" event={"ID":"fc4efcee-b872-406d-a694-3572222a8dfc","Type":"ContainerStarted","Data":"d33952d22e74bcca60b3c1e0691092c18f18aea5f093dcccaecb6484980ed6d5"}
Nov 26 11:21:09 crc kubenswrapper[4622]: I1126 11:21:09.280590 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-cxh54"
Nov 26 11:21:10 crc kubenswrapper[4622]: I1126 11:21:10.475434 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"]
Nov 26 11:21:10 crc kubenswrapper[4622]: I1126 11:21:10.476560 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"
Nov 26 11:21:10 crc kubenswrapper[4622]: I1126 11:21:10.478123 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Nov 26 11:21:10 crc kubenswrapper[4622]: I1126 11:21:10.486350 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"]
Nov 26 11:21:10 crc kubenswrapper[4622]: I1126 11:21:10.620303 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-bundle\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm\" (UID: \"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"
Nov 26 11:21:10 crc kubenswrapper[4622]: I1126 11:21:10.620367 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-util\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm\" (UID: \"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"
Nov 26 11:21:10 crc kubenswrapper[4622]: I1126 11:21:10.620527 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gprq\" (UniqueName: \"kubernetes.io/projected/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-kube-api-access-5gprq\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm\" (UID: \"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"
Nov 26 11:21:10 crc kubenswrapper[4622]: I1126 11:21:10.721537 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gprq\" (UniqueName: \"kubernetes.io/projected/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-kube-api-access-5gprq\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm\" (UID: \"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"
Nov 26 11:21:10 crc kubenswrapper[4622]: I1126 11:21:10.721772 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-bundle\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm\" (UID: \"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"
Nov 26 11:21:10 crc kubenswrapper[4622]: I1126 11:21:10.721850 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-util\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm\" (UID: \"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"
Nov 26 11:21:10 crc kubenswrapper[4622]: I1126 11:21:10.722200 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-util\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm\" (UID: \"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"
Nov 26 11:21:10 crc kubenswrapper[4622]: I1126 11:21:10.722331 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-bundle\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm\" (UID: \"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"
Nov 26 11:21:10 crc kubenswrapper[4622]: I1126 11:21:10.738059 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gprq\" (UniqueName: \"kubernetes.io/projected/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-kube-api-access-5gprq\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm\" (UID: \"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"
Nov 26 11:21:10 crc kubenswrapper[4622]: I1126 11:21:10.789404 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"
Nov 26 11:21:10 crc kubenswrapper[4622]: I1126 11:21:10.922147 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"]
Nov 26 11:21:11 crc kubenswrapper[4622]: I1126 11:21:11.591861 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm" event={"ID":"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d","Type":"ContainerStarted","Data":"1f2bdb1785adacf456f7dfecdcd5dd276baaa49d83bbd63cec89b62f7f49bcc6"}
Nov 26 11:21:11 crc kubenswrapper[4622]: I1126 11:21:11.591903 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm" event={"ID":"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d","Type":"ContainerStarted","Data":"9a615b4c8c81b66073f674c3560bbff3830a1f8e33881e1e9d777cc81c1918c6"}
Nov 26 11:21:12 crc kubenswrapper[4622]: I1126 11:21:12.598904 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d" containerID="1f2bdb1785adacf456f7dfecdcd5dd276baaa49d83bbd63cec89b62f7f49bcc6" exitCode=0
Nov 26 11:21:12 crc kubenswrapper[4622]: I1126 11:21:12.599170 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm" event={"ID":"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d","Type":"ContainerDied","Data":"1f2bdb1785adacf456f7dfecdcd5dd276baaa49d83bbd63cec89b62f7f49bcc6"}
Nov 26 11:21:14 crc kubenswrapper[4622]: I1126 11:21:14.612976 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d" containerID="09067457230fcdb6d316296ca9f122ec485eb10986e78c172096b1fdde6c2565" exitCode=0
Nov 26 11:21:14 crc kubenswrapper[4622]: I1126 11:21:14.613075 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm" event={"ID":"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d","Type":"ContainerDied","Data":"09067457230fcdb6d316296ca9f122ec485eb10986e78c172096b1fdde6c2565"}
Nov 26 11:21:15 crc kubenswrapper[4622]: I1126 11:21:15.620625 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d" containerID="918ab91d1853907d433800ea3201fa09150529e57753c7a835ab5c4549d9d632" exitCode=0
Nov 26 11:21:15 crc kubenswrapper[4622]: I1126 11:21:15.620682 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm" event={"ID":"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d","Type":"ContainerDied","Data":"918ab91d1853907d433800ea3201fa09150529e57753c7a835ab5c4549d9d632"}
Nov 26 11:21:16 crc kubenswrapper[4622]: I1126 11:21:16.793714 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"
Nov 26 11:21:16 crc kubenswrapper[4622]: I1126 11:21:16.990173 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-util\") pod \"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d\" (UID: \"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d\") "
Nov 26 11:21:16 crc kubenswrapper[4622]: I1126 11:21:16.990237 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-bundle\") pod \"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d\" (UID: \"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d\") "
Nov 26 11:21:16 crc kubenswrapper[4622]: I1126 11:21:16.990262 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gprq\" (UniqueName: \"kubernetes.io/projected/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-kube-api-access-5gprq\") pod \"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d\" (UID: \"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d\") "
Nov 26 11:21:16 crc kubenswrapper[4622]: I1126 11:21:16.990858 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-bundle" (OuterVolumeSpecName: "bundle") pod "b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d" (UID: "b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:21:16 crc kubenswrapper[4622]: I1126 11:21:16.997851 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-util" (OuterVolumeSpecName: "util") pod "b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d" (UID: "b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:21:17 crc kubenswrapper[4622]: I1126 11:21:17.004730 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-kube-api-access-5gprq" (OuterVolumeSpecName: "kube-api-access-5gprq") pod "b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d" (UID: "b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d"). InnerVolumeSpecName "kube-api-access-5gprq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:21:17 crc kubenswrapper[4622]: I1126 11:21:17.091750 4622 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-bundle\") on node \"crc\" DevicePath \"\""
Nov 26 11:21:17 crc kubenswrapper[4622]: I1126 11:21:17.092016 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gprq\" (UniqueName: \"kubernetes.io/projected/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-kube-api-access-5gprq\") on node \"crc\" DevicePath \"\""
Nov 26 11:21:17 crc kubenswrapper[4622]: I1126 11:21:17.092030 4622 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d-util\") on node \"crc\" DevicePath \"\""
Nov 26 11:21:17 crc kubenswrapper[4622]: I1126 11:21:17.631395 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm" event={"ID":"b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d","Type":"ContainerDied","Data":"9a615b4c8c81b66073f674c3560bbff3830a1f8e33881e1e9d777cc81c1918c6"}
Nov 26 11:21:17 crc kubenswrapper[4622]: I1126 11:21:17.631438 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a615b4c8c81b66073f674c3560bbff3830a1f8e33881e1e9d777cc81c1918c6"
Nov 26 11:21:17 crc kubenswrapper[4622]: I1126 11:21:17.631437 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm"
Nov 26 11:21:19 crc kubenswrapper[4622]: I1126 11:21:19.463351 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-557fdffb88-w28b4"]
Nov 26 11:21:19 crc kubenswrapper[4622]: E1126 11:21:19.463577 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d" containerName="util"
Nov 26 11:21:19 crc kubenswrapper[4622]: I1126 11:21:19.463591 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d" containerName="util"
Nov 26 11:21:19 crc kubenswrapper[4622]: E1126 11:21:19.463599 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d" containerName="extract"
Nov 26 11:21:19 crc kubenswrapper[4622]: I1126 11:21:19.463606 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d" containerName="extract"
Nov 26 11:21:19 crc kubenswrapper[4622]: E1126 11:21:19.463621 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d" containerName="pull"
Nov 26 11:21:19 crc kubenswrapper[4622]: I1126 11:21:19.463626 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d" containerName="pull"
Nov 26 11:21:19 crc kubenswrapper[4622]: I1126 11:21:19.463736 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d" containerName="extract"
Nov 26 11:21:19 crc kubenswrapper[4622]: I1126 11:21:19.464120 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-557fdffb88-w28b4"
Nov 26 11:21:19 crc kubenswrapper[4622]: I1126 11:21:19.465791 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-tfhxg"
Nov 26 11:21:19 crc kubenswrapper[4622]: I1126 11:21:19.465859 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt"
Nov 26 11:21:19 crc kubenswrapper[4622]: I1126 11:21:19.466532 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt"
Nov 26 11:21:19 crc kubenswrapper[4622]: I1126 11:21:19.471622 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-557fdffb88-w28b4"]
Nov 26 11:21:19 crc kubenswrapper[4622]: I1126 11:21:19.517726 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mskf5\" (UniqueName: \"kubernetes.io/projected/524f0fb3-a92d-4e3e-b07c-8d7ee8556d05-kube-api-access-mskf5\") pod \"nmstate-operator-557fdffb88-w28b4\" (UID: \"524f0fb3-a92d-4e3e-b07c-8d7ee8556d05\") " pod="openshift-nmstate/nmstate-operator-557fdffb88-w28b4"
Nov 26 11:21:19 crc kubenswrapper[4622]: I1126 11:21:19.618719 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mskf5\" (UniqueName: \"kubernetes.io/projected/524f0fb3-a92d-4e3e-b07c-8d7ee8556d05-kube-api-access-mskf5\") pod \"nmstate-operator-557fdffb88-w28b4\" (UID: \"524f0fb3-a92d-4e3e-b07c-8d7ee8556d05\") " pod="openshift-nmstate/nmstate-operator-557fdffb88-w28b4"
Nov 26 11:21:19 crc kubenswrapper[4622]: I1126 11:21:19.636658 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mskf5\" (UniqueName: \"kubernetes.io/projected/524f0fb3-a92d-4e3e-b07c-8d7ee8556d05-kube-api-access-mskf5\") pod \"nmstate-operator-557fdffb88-w28b4\" (UID: \"524f0fb3-a92d-4e3e-b07c-8d7ee8556d05\") " pod="openshift-nmstate/nmstate-operator-557fdffb88-w28b4"
Nov 26 11:21:19 crc kubenswrapper[4622]: I1126 11:21:19.776176 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-557fdffb88-w28b4"
Nov 26 11:21:19 crc kubenswrapper[4622]: I1126 11:21:19.922786 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-557fdffb88-w28b4"]
Nov 26 11:21:20 crc kubenswrapper[4622]: I1126 11:21:20.645292 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-557fdffb88-w28b4" event={"ID":"524f0fb3-a92d-4e3e-b07c-8d7ee8556d05","Type":"ContainerStarted","Data":"73cec05f2f894bf2102cc5bfb4bf358e41f60fc0cd25d3319dbee9b688b2ce23"}
Nov 26 11:21:22 crc kubenswrapper[4622]: I1126 11:21:22.658624 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-557fdffb88-w28b4" event={"ID":"524f0fb3-a92d-4e3e-b07c-8d7ee8556d05","Type":"ContainerStarted","Data":"e16cdd7bd59483be09c34bbcb5cf202808e8004ee021684acb6438c87654f5c3"}
Nov 26 11:21:22 crc kubenswrapper[4622]: I1126 11:21:22.683343 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-557fdffb88-w28b4" podStartSLOduration=1.559463854 podStartE2EDuration="3.683320311s" podCreationTimestamp="2025-11-26 11:21:19 +0000 UTC" firstStartedPulling="2025-11-26 11:21:19.931173028 +0000 UTC m=+639.522384551" lastFinishedPulling="2025-11-26 11:21:22.055029486 +0000 UTC m=+641.646241008" observedRunningTime="2025-11-26 11:21:22.681461674 +0000 UTC m=+642.272673197" watchObservedRunningTime="2025-11-26 11:21:22.683320311 +0000 UTC m=+642.274531834"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.452578 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-5dcf9c57c5-xxnmq"]
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.453346 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-xxnmq"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.455041 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-zmj5j"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.466433 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-5dcf9c57c5-xxnmq"]
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.476178 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm"]
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.477060 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.479762 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.483123 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-56hb5"]
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.483833 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.497727 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm"]
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.563211 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bz7r\" (UniqueName: \"kubernetes.io/projected/4a4797ef-0155-4c03-af71-90ad8b7bb9ce-kube-api-access-5bz7r\") pod \"nmstate-metrics-5dcf9c57c5-xxnmq\" (UID: \"4a4797ef-0155-4c03-af71-90ad8b7bb9ce\") " pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-xxnmq"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.563318 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/42a26eeb-75bc-4cf4-80d4-c448ba54cf53-ovs-socket\") pod \"nmstate-handler-56hb5\" (UID: \"42a26eeb-75bc-4cf4-80d4-c448ba54cf53\") " pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.563344 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/57e7baad-e597-4021-8a23-961efc666e1d-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-4rmtm\" (UID: \"57e7baad-e597-4021-8a23-961efc666e1d\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.563363 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ts2gj\" (UniqueName: \"kubernetes.io/projected/42a26eeb-75bc-4cf4-80d4-c448ba54cf53-kube-api-access-ts2gj\") pod \"nmstate-handler-56hb5\" (UID: \"42a26eeb-75bc-4cf4-80d4-c448ba54cf53\") " pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.563384 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j57vt\" (UniqueName: \"kubernetes.io/projected/57e7baad-e597-4021-8a23-961efc666e1d-kube-api-access-j57vt\") pod \"nmstate-webhook-6b89b748d8-4rmtm\" (UID: \"57e7baad-e597-4021-8a23-961efc666e1d\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.563556 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/42a26eeb-75bc-4cf4-80d4-c448ba54cf53-dbus-socket\") pod \"nmstate-handler-56hb5\" (UID: \"42a26eeb-75bc-4cf4-80d4-c448ba54cf53\") " pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.563615 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/42a26eeb-75bc-4cf4-80d4-c448ba54cf53-nmstate-lock\") pod \"nmstate-handler-56hb5\" (UID: \"42a26eeb-75bc-4cf4-80d4-c448ba54cf53\") " pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.584702 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q"]
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.585495 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.587976 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-bzstb"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.588250 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.588552 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.595759 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q"]
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.664548 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/42a26eeb-75bc-4cf4-80d4-c448ba54cf53-dbus-socket\") pod \"nmstate-handler-56hb5\" (UID: \"42a26eeb-75bc-4cf4-80d4-c448ba54cf53\") " pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.664607 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/42a26eeb-75bc-4cf4-80d4-c448ba54cf53-nmstate-lock\") pod \"nmstate-handler-56hb5\" (UID: \"42a26eeb-75bc-4cf4-80d4-c448ba54cf53\") " pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.664646 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/0f2b214a-5b02-4ad5-906f-eda41d105d39-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-8zb8q\" (UID: \"0f2b214a-5b02-4ad5-906f-eda41d105d39\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.664678 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bz7r\" (UniqueName: \"kubernetes.io/projected/4a4797ef-0155-4c03-af71-90ad8b7bb9ce-kube-api-access-5bz7r\") pod \"nmstate-metrics-5dcf9c57c5-xxnmq\" (UID: \"4a4797ef-0155-4c03-af71-90ad8b7bb9ce\") " pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-xxnmq"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.664717 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/0f2b214a-5b02-4ad5-906f-eda41d105d39-nginx-conf\") pod \"nmstate-console-plugin-5874bd7bc5-8zb8q\" (UID: \"0f2b214a-5b02-4ad5-906f-eda41d105d39\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.664788 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg8gz\" (UniqueName: \"kubernetes.io/projected/0f2b214a-5b02-4ad5-906f-eda41d105d39-kube-api-access-sg8gz\") pod \"nmstate-console-plugin-5874bd7bc5-8zb8q\" (UID: \"0f2b214a-5b02-4ad5-906f-eda41d105d39\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.664830 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/42a26eeb-75bc-4cf4-80d4-c448ba54cf53-ovs-socket\") pod \"nmstate-handler-56hb5\" (UID: \"42a26eeb-75bc-4cf4-80d4-c448ba54cf53\") " pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.664833 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/42a26eeb-75bc-4cf4-80d4-c448ba54cf53-dbus-socket\") pod \"nmstate-handler-56hb5\" (UID: \"42a26eeb-75bc-4cf4-80d4-c448ba54cf53\") " pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.664855 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/57e7baad-e597-4021-8a23-961efc666e1d-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-4rmtm\" (UID: \"57e7baad-e597-4021-8a23-961efc666e1d\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.664876 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ts2gj\" (UniqueName: \"kubernetes.io/projected/42a26eeb-75bc-4cf4-80d4-c448ba54cf53-kube-api-access-ts2gj\") pod \"nmstate-handler-56hb5\" (UID: \"42a26eeb-75bc-4cf4-80d4-c448ba54cf53\") " pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.664953 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j57vt\" (UniqueName: \"kubernetes.io/projected/57e7baad-e597-4021-8a23-961efc666e1d-kube-api-access-j57vt\") pod \"nmstate-webhook-6b89b748d8-4rmtm\" (UID: \"57e7baad-e597-4021-8a23-961efc666e1d\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.665192 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/42a26eeb-75bc-4cf4-80d4-c448ba54cf53-ovs-socket\") pod \"nmstate-handler-56hb5\" (UID: \"42a26eeb-75bc-4cf4-80d4-c448ba54cf53\") " pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.665333 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/42a26eeb-75bc-4cf4-80d4-c448ba54cf53-nmstate-lock\") pod \"nmstate-handler-56hb5\" (UID: \"42a26eeb-75bc-4cf4-80d4-c448ba54cf53\") " pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.675881 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/57e7baad-e597-4021-8a23-961efc666e1d-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-4rmtm\" (UID: \"57e7baad-e597-4021-8a23-961efc666e1d\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.683446 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j57vt\" (UniqueName: \"kubernetes.io/projected/57e7baad-e597-4021-8a23-961efc666e1d-kube-api-access-j57vt\") pod \"nmstate-webhook-6b89b748d8-4rmtm\" (UID: \"57e7baad-e597-4021-8a23-961efc666e1d\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.683880 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bz7r\" (UniqueName: \"kubernetes.io/projected/4a4797ef-0155-4c03-af71-90ad8b7bb9ce-kube-api-access-5bz7r\") pod \"nmstate-metrics-5dcf9c57c5-xxnmq\" (UID: \"4a4797ef-0155-4c03-af71-90ad8b7bb9ce\") " pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-xxnmq"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.686141 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ts2gj\" (UniqueName: \"kubernetes.io/projected/42a26eeb-75bc-4cf4-80d4-c448ba54cf53-kube-api-access-ts2gj\") pod \"nmstate-handler-56hb5\" (UID: \"42a26eeb-75bc-4cf4-80d4-c448ba54cf53\") " pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.765827 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg8gz\" (UniqueName: \"kubernetes.io/projected/0f2b214a-5b02-4ad5-906f-eda41d105d39-kube-api-access-sg8gz\") pod \"nmstate-console-plugin-5874bd7bc5-8zb8q\" (UID: \"0f2b214a-5b02-4ad5-906f-eda41d105d39\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.766470 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/0f2b214a-5b02-4ad5-906f-eda41d105d39-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-8zb8q\" (UID: \"0f2b214a-5b02-4ad5-906f-eda41d105d39\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.766585 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/0f2b214a-5b02-4ad5-906f-eda41d105d39-nginx-conf\") pod \"nmstate-console-plugin-5874bd7bc5-8zb8q\" (UID: \"0f2b214a-5b02-4ad5-906f-eda41d105d39\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.767986 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/0f2b214a-5b02-4ad5-906f-eda41d105d39-nginx-conf\") pod \"nmstate-console-plugin-5874bd7bc5-8zb8q\" (UID: \"0f2b214a-5b02-4ad5-906f-eda41d105d39\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.768419 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-xxnmq"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.772098 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/0f2b214a-5b02-4ad5-906f-eda41d105d39-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-8zb8q\" (UID: \"0f2b214a-5b02-4ad5-906f-eda41d105d39\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.789049 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.789139 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg8gz\" (UniqueName: \"kubernetes.io/projected/0f2b214a-5b02-4ad5-906f-eda41d105d39-kube-api-access-sg8gz\") pod \"nmstate-console-plugin-5874bd7bc5-8zb8q\" (UID: \"0f2b214a-5b02-4ad5-906f-eda41d105d39\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.798817 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.820533 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6b6dc57555-84pkk"]
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.821255 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.846819 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6b6dc57555-84pkk"]
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.905614 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.968648 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/70908426-d755-4f56-8f69-96a1fe1bdd64-console-config\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.969030 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wqm5\" (UniqueName: \"kubernetes.io/projected/70908426-d755-4f56-8f69-96a1fe1bdd64-kube-api-access-7wqm5\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.969256 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/70908426-d755-4f56-8f69-96a1fe1bdd64-console-serving-cert\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.969314 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/70908426-d755-4f56-8f69-96a1fe1bdd64-oauth-serving-cert\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.969356 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/70908426-d755-4f56-8f69-96a1fe1bdd64-trusted-ca-bundle\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.969378 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/70908426-d755-4f56-8f69-96a1fe1bdd64-console-oauth-config\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:23 crc kubenswrapper[4622]: I1126 11:21:23.969443 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/70908426-d755-4f56-8f69-96a1fe1bdd64-service-ca\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.027777 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-5dcf9c57c5-xxnmq"]
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.071466 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/70908426-d755-4f56-8f69-96a1fe1bdd64-console-config\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.071535 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wqm5\" (UniqueName: \"kubernetes.io/projected/70908426-d755-4f56-8f69-96a1fe1bdd64-kube-api-access-7wqm5\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.071632 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/70908426-d755-4f56-8f69-96a1fe1bdd64-console-serving-cert\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.071670 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/70908426-d755-4f56-8f69-96a1fe1bdd64-oauth-serving-cert\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.071698 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/70908426-d755-4f56-8f69-96a1fe1bdd64-console-oauth-config\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.071712 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/70908426-d755-4f56-8f69-96a1fe1bdd64-trusted-ca-bundle\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.071757 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/70908426-d755-4f56-8f69-96a1fe1bdd64-service-ca\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.072783 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/70908426-d755-4f56-8f69-96a1fe1bdd64-console-config\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.073261 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/70908426-d755-4f56-8f69-96a1fe1bdd64-service-ca\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.073517 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/70908426-d755-4f56-8f69-96a1fe1bdd64-oauth-serving-cert\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.073991 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/70908426-d755-4f56-8f69-96a1fe1bdd64-trusted-ca-bundle\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.077224 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm"]
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.078459 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/70908426-d755-4f56-8f69-96a1fe1bdd64-console-serving-cert\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.078771 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/70908426-d755-4f56-8f69-96a1fe1bdd64-console-oauth-config\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.087825 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wqm5\" (UniqueName: \"kubernetes.io/projected/70908426-d755-4f56-8f69-96a1fe1bdd64-kube-api-access-7wqm5\") pod \"console-6b6dc57555-84pkk\" (UID: \"70908426-d755-4f56-8f69-96a1fe1bdd64\") " pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.146340 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.294534 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6b6dc57555-84pkk"]
Nov 26 11:21:24 crc kubenswrapper[4622]: W1126 11:21:24.297880 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70908426_d755_4f56_8f69_96a1fe1bdd64.slice/crio-9d42595377feeef07c476371109b6da3400fe015d89e3ca893e539c0daa0ba29 WatchSource:0}: Error finding container 9d42595377feeef07c476371109b6da3400fe015d89e3ca893e539c0daa0ba29: Status 404 returned error can't find the container with id 9d42595377feeef07c476371109b6da3400fe015d89e3ca893e539c0daa0ba29
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.333087 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q"]
Nov 26 11:21:24 crc kubenswrapper[4622]: W1126 11:21:24.333436 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f2b214a_5b02_4ad5_906f_eda41d105d39.slice/crio-d3d33047e4a79b15eb3cadaebb5cefe57fff7125a0a4768654e1cbeed0b45b59 WatchSource:0}: Error finding container d3d33047e4a79b15eb3cadaebb5cefe57fff7125a0a4768654e1cbeed0b45b59: Status 404 returned error can't find the container with id d3d33047e4a79b15eb3cadaebb5cefe57fff7125a0a4768654e1cbeed0b45b59
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.672602 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6b6dc57555-84pkk" event={"ID":"70908426-d755-4f56-8f69-96a1fe1bdd64","Type":"ContainerStarted","Data":"c3e2aaef0e3ffccf73b930fc62650364f88ea7dee1274a10d4bcb965f216dc6e"}
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.672955 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6b6dc57555-84pkk" event={"ID":"70908426-d755-4f56-8f69-96a1fe1bdd64","Type":"ContainerStarted","Data":"9d42595377feeef07c476371109b6da3400fe015d89e3ca893e539c0daa0ba29"}
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.673861 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-56hb5" event={"ID":"42a26eeb-75bc-4cf4-80d4-c448ba54cf53","Type":"ContainerStarted","Data":"ed909a14b99933c24ad7899d10becfea303aa5ffae80bdd8d955d0c329cae7b5"}
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.674966 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-xxnmq" event={"ID":"4a4797ef-0155-4c03-af71-90ad8b7bb9ce","Type":"ContainerStarted","Data":"70795b88bfc35fc652a0fb0113092f8690f1b5fe628e000146016f33d210e30e"}
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.676940 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm" event={"ID":"57e7baad-e597-4021-8a23-961efc666e1d","Type":"ContainerStarted","Data":"575008abf58dfd3e695bd42f78839769dd45d699f3709eab35e7cf8d6b5fb524"}
Nov 26 11:21:24 crc kubenswrapper[4622]: I1126 11:21:24.677951 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q" event={"ID":"0f2b214a-5b02-4ad5-906f-eda41d105d39","Type":"ContainerStarted","Data":"d3d33047e4a79b15eb3cadaebb5cefe57fff7125a0a4768654e1cbeed0b45b59"}
Nov 26 11:21:27 crc kubenswrapper[4622]: I1126 11:21:27.697956 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-56hb5" event={"ID":"42a26eeb-75bc-4cf4-80d4-c448ba54cf53","Type":"ContainerStarted","Data":"1004c52a5d33db654db0b2529a4326be97b1bf8824d88f4da4516f1f56c45a7b"}
Nov 26 11:21:27 crc kubenswrapper[4622]: I1126 11:21:27.698266 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:27 crc kubenswrapper[4622]: I1126 11:21:27.699540 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-xxnmq" event={"ID":"4a4797ef-0155-4c03-af71-90ad8b7bb9ce","Type":"ContainerStarted","Data":"ace843996dd3cf75c9d102381d0f43b1b1701dde26b194175264282a17a81f6a"}
Nov 26 11:21:27 crc kubenswrapper[4622]: I1126 11:21:27.701130 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm" event={"ID":"57e7baad-e597-4021-8a23-961efc666e1d","Type":"ContainerStarted","Data":"0eb88d714357f95de80f9ed7d2030755bbf098741613905e2c7a16e7f9d209ef"}
Nov 26 11:21:27 crc kubenswrapper[4622]: I1126 11:21:27.701231 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm"
Nov 26 11:21:27 crc kubenswrapper[4622]: I1126 11:21:27.702533 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q" event={"ID":"0f2b214a-5b02-4ad5-906f-eda41d105d39","Type":"ContainerStarted","Data":"4ceaa5b0acf1165ff977426ea1ab270307f74a4b05ab4eb45e08979630b09147"}
Nov 26 11:21:27 crc kubenswrapper[4622]: I1126 11:21:27.715739 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-56hb5" podStartSLOduration=1.943906837 podStartE2EDuration="4.715727776s" podCreationTimestamp="2025-11-26 11:21:23 +0000 UTC" firstStartedPulling="2025-11-26 11:21:23.889794518 +0000 UTC m=+643.481006040" lastFinishedPulling="2025-11-26 11:21:26.661615457 +0000 UTC m=+646.252826979" observedRunningTime="2025-11-26 11:21:27.711909462 +0000 UTC m=+647.303120994" watchObservedRunningTime="2025-11-26 11:21:27.715727776 +0000 UTC m=+647.306939288"
Nov 26 11:21:27 crc kubenswrapper[4622]: I1126 11:21:27.717204 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6b6dc57555-84pkk" podStartSLOduration=4.7171977179999995 podStartE2EDuration="4.717197718s" podCreationTimestamp="2025-11-26 11:21:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:21:24.68948 +0000 UTC m=+644.280691532" watchObservedRunningTime="2025-11-26 11:21:27.717197718 +0000 UTC m=+647.308409240"
Nov 26 11:21:27 crc kubenswrapper[4622]: I1126 11:21:27.725096 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-8zb8q" podStartSLOduration=2.409986923 podStartE2EDuration="4.725076323s" podCreationTimestamp="2025-11-26 11:21:23 +0000 UTC" firstStartedPulling="2025-11-26 11:21:24.335673973 +0000 UTC m=+643.926885495" lastFinishedPulling="2025-11-26 11:21:26.650763373 +0000 UTC m=+646.241974895" observedRunningTime="2025-11-26 11:21:27.722866404 +0000 UTC m=+647.314077926" watchObservedRunningTime="2025-11-26 11:21:27.725076323 +0000 UTC m=+647.316287844"
Nov 26 11:21:27 crc kubenswrapper[4622]: I1126 11:21:27.736433 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm" podStartSLOduration=2.165295822 podStartE2EDuration="4.736417348s" podCreationTimestamp="2025-11-26 11:21:23 +0000 UTC" firstStartedPulling="2025-11-26 11:21:24.079722159 +0000 UTC m=+643.670933681" lastFinishedPulling="2025-11-26 11:21:26.650843685 +0000 UTC m=+646.242055207" observedRunningTime="2025-11-26 11:21:27.735864807 +0000 UTC m=+647.327076329" watchObservedRunningTime="2025-11-26 11:21:27.736417348 +0000 UTC m=+647.327628871"
Nov 26 11:21:29 crc kubenswrapper[4622]: I1126 11:21:29.714062 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-xxnmq" event={"ID":"4a4797ef-0155-4c03-af71-90ad8b7bb9ce","Type":"ContainerStarted","Data":"e5edd3b33bcf162b4bc2f5bc9ce4969dcc169d56e9aa3e7fb34bebeb9540aa89"}
Nov 26 11:21:29 crc kubenswrapper[4622]: I1126 11:21:29.728023 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-xxnmq" podStartSLOduration=1.513177233 podStartE2EDuration="6.728004645s" podCreationTimestamp="2025-11-26 11:21:23 +0000 UTC" firstStartedPulling="2025-11-26 11:21:24.034386608 +0000 UTC m=+643.625598130" lastFinishedPulling="2025-11-26 11:21:29.249214019 +0000 UTC m=+648.840425542" observedRunningTime="2025-11-26 11:21:29.727802945 +0000 UTC m=+649.319014466" watchObservedRunningTime="2025-11-26 11:21:29.728004645 +0000 UTC m=+649.319216166"
Nov 26 11:21:33 crc kubenswrapper[4622]: I1126 11:21:33.817170 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-56hb5"
Nov 26 11:21:34 crc kubenswrapper[4622]: I1126 11:21:34.147330 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:34 crc kubenswrapper[4622]: I1126 11:21:34.147392 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:34 crc kubenswrapper[4622]: I1126 11:21:34.152191 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:34 crc kubenswrapper[4622]: I1126 11:21:34.745121 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-6b6dc57555-84pkk"
Nov 26 11:21:34 crc kubenswrapper[4622]: I1126 11:21:34.780864 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-r2tzg"]
Nov 26 11:21:43 crc kubenswrapper[4622]: I1126 11:21:43.796268 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-4rmtm"
Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.034647 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k"]
Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.036031 4622 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.038144 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.040992 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k"] Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.077204 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbfbl\" (UniqueName: \"kubernetes.io/projected/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-kube-api-access-lbfbl\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k\" (UID: \"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.077334 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k\" (UID: \"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.077418 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k\" (UID: \"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.178303 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k\" (UID: \"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.178458 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbfbl\" (UniqueName: \"kubernetes.io/projected/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-kube-api-access-lbfbl\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k\" (UID: \"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.178569 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k\" (UID: \"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.178899 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k\" (UID: \"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.179017 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k\" (UID: \"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.195197 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbfbl\" (UniqueName: \"kubernetes.io/projected/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-kube-api-access-lbfbl\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k\" (UID: \"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.349715 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.698485 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k"] Nov 26 11:21:54 crc kubenswrapper[4622]: I1126 11:21:54.842152 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" event={"ID":"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8","Type":"ContainerStarted","Data":"d6b9e855b7205b1f96060aacfd8985817348c5e48364b47acba5f25cda473349"} Nov 26 11:21:55 crc kubenswrapper[4622]: I1126 11:21:55.848330 4622 generic.go:334] "Generic (PLEG): container finished" podID="ea5f56ee-1066-4bb3-b055-1550d4fd9fd8" containerID="c0179c33f52c716120740f82868b74232bf7997400f0e11a80c8c2ea0053fcee" exitCode=0 Nov 26 11:21:55 crc kubenswrapper[4622]: I1126 11:21:55.848437 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" event={"ID":"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8","Type":"ContainerDied","Data":"c0179c33f52c716120740f82868b74232bf7997400f0e11a80c8c2ea0053fcee"} Nov 26 11:21:57 crc kubenswrapper[4622]: I1126 11:21:57.862232 4622 generic.go:334] "Generic (PLEG): container finished" podID="ea5f56ee-1066-4bb3-b055-1550d4fd9fd8" containerID="92bcdfb08bf0c3389aa4739d950f6542654576a0b174ab74b8693388d493d7a8" exitCode=0 Nov 26 11:21:57 crc kubenswrapper[4622]: I1126 11:21:57.862344 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" event={"ID":"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8","Type":"ContainerDied","Data":"92bcdfb08bf0c3389aa4739d950f6542654576a0b174ab74b8693388d493d7a8"} Nov 26 11:21:58 crc kubenswrapper[4622]: I1126 11:21:58.868684 4622 generic.go:334] "Generic (PLEG): container finished" podID="ea5f56ee-1066-4bb3-b055-1550d4fd9fd8" containerID="e53983fdc6c9433f32344bee3abc542dc0ee7f6f574571883fe3165f2133e561" exitCode=0 Nov 26 11:21:58 crc kubenswrapper[4622]: I1126 
11:21:58.868769 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" event={"ID":"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8","Type":"ContainerDied","Data":"e53983fdc6c9433f32344bee3abc542dc0ee7f6f574571883fe3165f2133e561"} Nov 26 11:21:59 crc kubenswrapper[4622]: I1126 11:21:59.809254 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-r2tzg" podUID="c58a0d86-56ac-4aaa-b2c9-995c925cd839" containerName="console" containerID="cri-o://01d8fbd0913a8775e6aa41f486212fd64f03bd1ad4913aa12e410773e0f65dab" gracePeriod=15 Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.082286 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.256467 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-bundle\") pod \"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8\" (UID: \"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8\") " Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.256989 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-util\") pod \"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8\" (UID: \"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8\") " Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.257261 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbfbl\" (UniqueName: \"kubernetes.io/projected/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-kube-api-access-lbfbl\") pod \"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8\" (UID: \"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8\") " Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.257858 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-bundle" (OuterVolumeSpecName: "bundle") pod "ea5f56ee-1066-4bb3-b055-1550d4fd9fd8" (UID: "ea5f56ee-1066-4bb3-b055-1550d4fd9fd8"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.264289 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-kube-api-access-lbfbl" (OuterVolumeSpecName: "kube-api-access-lbfbl") pod "ea5f56ee-1066-4bb3-b055-1550d4fd9fd8" (UID: "ea5f56ee-1066-4bb3-b055-1550d4fd9fd8"). InnerVolumeSpecName "kube-api-access-lbfbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.267089 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-util" (OuterVolumeSpecName: "util") pod "ea5f56ee-1066-4bb3-b055-1550d4fd9fd8" (UID: "ea5f56ee-1066-4bb3-b055-1550d4fd9fd8"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.360075 4622 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.360128 4622 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-util\") on node \"crc\" DevicePath \"\"" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.360140 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbfbl\" (UniqueName: \"kubernetes.io/projected/ea5f56ee-1066-4bb3-b055-1550d4fd9fd8-kube-api-access-lbfbl\") on node \"crc\" DevicePath \"\"" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.629783 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-r2tzg_c58a0d86-56ac-4aaa-b2c9-995c925cd839/console/0.log" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.629904 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.664150 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-serving-cert\") pod \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.664197 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgmq7\" (UniqueName: \"kubernetes.io/projected/c58a0d86-56ac-4aaa-b2c9-995c925cd839-kube-api-access-tgmq7\") pod \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.664226 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-service-ca\") pod \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.664248 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-config\") pod \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.664310 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-trusted-ca-bundle\") pod \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.664342 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-oauth-config\") pod \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.664391 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-oauth-serving-cert\") pod \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\" (UID: \"c58a0d86-56ac-4aaa-b2c9-995c925cd839\") " Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.665021 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-config" (OuterVolumeSpecName: "console-config") pod "c58a0d86-56ac-4aaa-b2c9-995c925cd839" (UID: "c58a0d86-56ac-4aaa-b2c9-995c925cd839"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.665046 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "c58a0d86-56ac-4aaa-b2c9-995c925cd839" (UID: "c58a0d86-56ac-4aaa-b2c9-995c925cd839"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.665030 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "c58a0d86-56ac-4aaa-b2c9-995c925cd839" (UID: "c58a0d86-56ac-4aaa-b2c9-995c925cd839"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.665309 4622 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.665333 4622 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.665344 4622 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.665896 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-service-ca" (OuterVolumeSpecName: "service-ca") pod "c58a0d86-56ac-4aaa-b2c9-995c925cd839" (UID: "c58a0d86-56ac-4aaa-b2c9-995c925cd839"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.667254 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c58a0d86-56ac-4aaa-b2c9-995c925cd839-kube-api-access-tgmq7" (OuterVolumeSpecName: "kube-api-access-tgmq7") pod "c58a0d86-56ac-4aaa-b2c9-995c925cd839" (UID: "c58a0d86-56ac-4aaa-b2c9-995c925cd839"). InnerVolumeSpecName "kube-api-access-tgmq7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.667335 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "c58a0d86-56ac-4aaa-b2c9-995c925cd839" (UID: "c58a0d86-56ac-4aaa-b2c9-995c925cd839"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.667988 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "c58a0d86-56ac-4aaa-b2c9-995c925cd839" (UID: "c58a0d86-56ac-4aaa-b2c9-995c925cd839"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.767265 4622 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.767323 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgmq7\" (UniqueName: \"kubernetes.io/projected/c58a0d86-56ac-4aaa-b2c9-995c925cd839-kube-api-access-tgmq7\") on node \"crc\" DevicePath \"\"" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.767340 4622 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c58a0d86-56ac-4aaa-b2c9-995c925cd839-service-ca\") on node \"crc\" DevicePath \"\"" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.767349 4622 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c58a0d86-56ac-4aaa-b2c9-995c925cd839-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.886078 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-r2tzg_c58a0d86-56ac-4aaa-b2c9-995c925cd839/console/0.log" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.886145 4622 generic.go:334] "Generic (PLEG): container finished" podID="c58a0d86-56ac-4aaa-b2c9-995c925cd839" containerID="01d8fbd0913a8775e6aa41f486212fd64f03bd1ad4913aa12e410773e0f65dab" exitCode=2 Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.886310 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-r2tzg" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.886450 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-r2tzg" event={"ID":"c58a0d86-56ac-4aaa-b2c9-995c925cd839","Type":"ContainerDied","Data":"01d8fbd0913a8775e6aa41f486212fd64f03bd1ad4913aa12e410773e0f65dab"} Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.886587 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-r2tzg" event={"ID":"c58a0d86-56ac-4aaa-b2c9-995c925cd839","Type":"ContainerDied","Data":"67d2db5645bd7ecba9970d32347525db60b6c8aa0fc4831443d3338030b1bd59"} Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.886614 4622 scope.go:117] "RemoveContainer" containerID="01d8fbd0913a8775e6aa41f486212fd64f03bd1ad4913aa12e410773e0f65dab" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.892618 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" event={"ID":"ea5f56ee-1066-4bb3-b055-1550d4fd9fd8","Type":"ContainerDied","Data":"d6b9e855b7205b1f96060aacfd8985817348c5e48364b47acba5f25cda473349"} Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.892664 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6b9e855b7205b1f96060aacfd8985817348c5e48364b47acba5f25cda473349" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.892701 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.902969 4622 scope.go:117] "RemoveContainer" containerID="01d8fbd0913a8775e6aa41f486212fd64f03bd1ad4913aa12e410773e0f65dab" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.903036 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-r2tzg"] Nov 26 11:22:00 crc kubenswrapper[4622]: E1126 11:22:00.903456 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01d8fbd0913a8775e6aa41f486212fd64f03bd1ad4913aa12e410773e0f65dab\": container with ID starting with 01d8fbd0913a8775e6aa41f486212fd64f03bd1ad4913aa12e410773e0f65dab not found: ID does not exist" containerID="01d8fbd0913a8775e6aa41f486212fd64f03bd1ad4913aa12e410773e0f65dab" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.903532 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01d8fbd0913a8775e6aa41f486212fd64f03bd1ad4913aa12e410773e0f65dab"} err="failed to get container status \"01d8fbd0913a8775e6aa41f486212fd64f03bd1ad4913aa12e410773e0f65dab\": rpc error: code = NotFound desc = could not find container \"01d8fbd0913a8775e6aa41f486212fd64f03bd1ad4913aa12e410773e0f65dab\": container with ID starting with 01d8fbd0913a8775e6aa41f486212fd64f03bd1ad4913aa12e410773e0f65dab not found: ID does not exist" Nov 26 11:22:00 crc kubenswrapper[4622]: I1126 11:22:00.906662 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-r2tzg"] Nov 26 11:22:02 crc kubenswrapper[4622]: I1126 11:22:02.711247 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c58a0d86-56ac-4aaa-b2c9-995c925cd839" path="/var/lib/kubelet/pods/c58a0d86-56ac-4aaa-b2c9-995c925cd839/volumes" Nov 26 11:22:09 crc 
kubenswrapper[4622]: I1126 11:22:09.372701 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q"] Nov 26 11:22:09 crc kubenswrapper[4622]: E1126 11:22:09.373433 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c58a0d86-56ac-4aaa-b2c9-995c925cd839" containerName="console" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.373450 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="c58a0d86-56ac-4aaa-b2c9-995c925cd839" containerName="console" Nov 26 11:22:09 crc kubenswrapper[4622]: E1126 11:22:09.373467 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea5f56ee-1066-4bb3-b055-1550d4fd9fd8" containerName="extract" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.373473 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea5f56ee-1066-4bb3-b055-1550d4fd9fd8" containerName="extract" Nov 26 11:22:09 crc kubenswrapper[4622]: E1126 11:22:09.373487 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea5f56ee-1066-4bb3-b055-1550d4fd9fd8" containerName="pull" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.373494 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea5f56ee-1066-4bb3-b055-1550d4fd9fd8" containerName="pull" Nov 26 11:22:09 crc kubenswrapper[4622]: E1126 11:22:09.373526 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea5f56ee-1066-4bb3-b055-1550d4fd9fd8" containerName="util" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.373532 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea5f56ee-1066-4bb3-b055-1550d4fd9fd8" containerName="util" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.373645 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea5f56ee-1066-4bb3-b055-1550d4fd9fd8" containerName="extract" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.373664 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="c58a0d86-56ac-4aaa-b2c9-995c925cd839" containerName="console" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.374114 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.376623 4622 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.377723 4622 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-42gxw" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.377726 4622 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.377802 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.377812 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.387310 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q"] Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.492650 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/70d2aa06-6937-4f5c-9e75-17a777eb7e78-webhook-cert\") pod \"metallb-operator-controller-manager-574b75b4b5-vqg2q\" (UID: \"70d2aa06-6937-4f5c-9e75-17a777eb7e78\") " pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.493002 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwqvh\" (UniqueName: \"kubernetes.io/projected/70d2aa06-6937-4f5c-9e75-17a777eb7e78-kube-api-access-wwqvh\") pod \"metallb-operator-controller-manager-574b75b4b5-vqg2q\" (UID: \"70d2aa06-6937-4f5c-9e75-17a777eb7e78\") " pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.493139 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/70d2aa06-6937-4f5c-9e75-17a777eb7e78-apiservice-cert\") pod \"metallb-operator-controller-manager-574b75b4b5-vqg2q\" (UID: \"70d2aa06-6937-4f5c-9e75-17a777eb7e78\") " pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.505387 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr"] Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.506148 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.508098 4622 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.508342 4622 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.508643 4622 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-ffxbz" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.519462 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr"] Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.594360 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9q48g\" (UniqueName: \"kubernetes.io/projected/1b655923-a88c-49ea-938e-bff6999e007a-kube-api-access-9q48g\") pod \"metallb-operator-webhook-server-5765679bfc-p7wlr\" (UID: \"1b655923-a88c-49ea-938e-bff6999e007a\") " pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.594434 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/70d2aa06-6937-4f5c-9e75-17a777eb7e78-webhook-cert\") pod \"metallb-operator-controller-manager-574b75b4b5-vqg2q\" (UID: \"70d2aa06-6937-4f5c-9e75-17a777eb7e78\") " pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.594546 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwqvh\" (UniqueName: \"kubernetes.io/projected/70d2aa06-6937-4f5c-9e75-17a777eb7e78-kube-api-access-wwqvh\") pod \"metallb-operator-controller-manager-574b75b4b5-vqg2q\" (UID: \"70d2aa06-6937-4f5c-9e75-17a777eb7e78\") " pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.594643 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1b655923-a88c-49ea-938e-bff6999e007a-apiservice-cert\") pod \"metallb-operator-webhook-server-5765679bfc-p7wlr\" (UID: \"1b655923-a88c-49ea-938e-bff6999e007a\") " pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.594670 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1b655923-a88c-49ea-938e-bff6999e007a-webhook-cert\") pod \"metallb-operator-webhook-server-5765679bfc-p7wlr\" (UID: \"1b655923-a88c-49ea-938e-bff6999e007a\") " pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.594688 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/70d2aa06-6937-4f5c-9e75-17a777eb7e78-apiservice-cert\") pod \"metallb-operator-controller-manager-574b75b4b5-vqg2q\" (UID: \"70d2aa06-6937-4f5c-9e75-17a777eb7e78\") " pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" Nov 26 11:22:09 crc 
kubenswrapper[4622]: I1126 11:22:09.600372 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/70d2aa06-6937-4f5c-9e75-17a777eb7e78-apiservice-cert\") pod \"metallb-operator-controller-manager-574b75b4b5-vqg2q\" (UID: \"70d2aa06-6937-4f5c-9e75-17a777eb7e78\") " pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.608145 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwqvh\" (UniqueName: \"kubernetes.io/projected/70d2aa06-6937-4f5c-9e75-17a777eb7e78-kube-api-access-wwqvh\") pod \"metallb-operator-controller-manager-574b75b4b5-vqg2q\" (UID: \"70d2aa06-6937-4f5c-9e75-17a777eb7e78\") " pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.614234 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/70d2aa06-6937-4f5c-9e75-17a777eb7e78-webhook-cert\") pod \"metallb-operator-controller-manager-574b75b4b5-vqg2q\" (UID: \"70d2aa06-6937-4f5c-9e75-17a777eb7e78\") " pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.690439 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.695412 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1b655923-a88c-49ea-938e-bff6999e007a-apiservice-cert\") pod \"metallb-operator-webhook-server-5765679bfc-p7wlr\" (UID: \"1b655923-a88c-49ea-938e-bff6999e007a\") " pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.695466 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1b655923-a88c-49ea-938e-bff6999e007a-webhook-cert\") pod \"metallb-operator-webhook-server-5765679bfc-p7wlr\" (UID: \"1b655923-a88c-49ea-938e-bff6999e007a\") " pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.695527 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9q48g\" (UniqueName: \"kubernetes.io/projected/1b655923-a88c-49ea-938e-bff6999e007a-kube-api-access-9q48g\") pod \"metallb-operator-webhook-server-5765679bfc-p7wlr\" (UID: \"1b655923-a88c-49ea-938e-bff6999e007a\") " pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.698909 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1b655923-a88c-49ea-938e-bff6999e007a-apiservice-cert\") pod \"metallb-operator-webhook-server-5765679bfc-p7wlr\" (UID: \"1b655923-a88c-49ea-938e-bff6999e007a\") " pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.701497 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1b655923-a88c-49ea-938e-bff6999e007a-webhook-cert\") pod \"metallb-operator-webhook-server-5765679bfc-p7wlr\" (UID: 
\"1b655923-a88c-49ea-938e-bff6999e007a\") " pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.714339 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9q48g\" (UniqueName: \"kubernetes.io/projected/1b655923-a88c-49ea-938e-bff6999e007a-kube-api-access-9q48g\") pod \"metallb-operator-webhook-server-5765679bfc-p7wlr\" (UID: \"1b655923-a88c-49ea-938e-bff6999e007a\") " pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.817691 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.874651 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q"] Nov 26 11:22:09 crc kubenswrapper[4622]: I1126 11:22:09.951350 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" event={"ID":"70d2aa06-6937-4f5c-9e75-17a777eb7e78","Type":"ContainerStarted","Data":"19cd98f6c14e8017595d3102288a9b51b01d4adf9f2fd947811f4ec92bfcdde5"} Nov 26 11:22:10 crc kubenswrapper[4622]: I1126 11:22:10.225988 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr"] Nov 26 11:22:10 crc kubenswrapper[4622]: W1126 11:22:10.230038 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1b655923_a88c_49ea_938e_bff6999e007a.slice/crio-c73ccb3487fdd95fe27340cf63c2887eb954850e8cda7980193d65220d260617 WatchSource:0}: Error finding container c73ccb3487fdd95fe27340cf63c2887eb954850e8cda7980193d65220d260617: Status 404 returned error can't find the container with id c73ccb3487fdd95fe27340cf63c2887eb954850e8cda7980193d65220d260617 Nov 26 11:22:10 crc kubenswrapper[4622]: I1126 11:22:10.956831 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" event={"ID":"1b655923-a88c-49ea-938e-bff6999e007a","Type":"ContainerStarted","Data":"c73ccb3487fdd95fe27340cf63c2887eb954850e8cda7980193d65220d260617"} Nov 26 11:22:12 crc kubenswrapper[4622]: I1126 11:22:12.968565 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" event={"ID":"70d2aa06-6937-4f5c-9e75-17a777eb7e78","Type":"ContainerStarted","Data":"6952172fafbeda1c9ef1bbed3bc38983d02164406f7dfcf6da0656ed23acebd5"} Nov 26 11:22:12 crc kubenswrapper[4622]: I1126 11:22:12.969065 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" Nov 26 11:22:12 crc kubenswrapper[4622]: I1126 11:22:12.989407 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" podStartSLOduration=1.4526398839999999 podStartE2EDuration="3.989388891s" podCreationTimestamp="2025-11-26 11:22:09 +0000 UTC" firstStartedPulling="2025-11-26 11:22:09.885825233 +0000 UTC m=+689.477036755" lastFinishedPulling="2025-11-26 11:22:12.42257424 +0000 UTC m=+692.013785762" observedRunningTime="2025-11-26 11:22:12.985456502 +0000 UTC m=+692.576668024" watchObservedRunningTime="2025-11-26 11:22:12.989388891 +0000 
UTC m=+692.580600413" Nov 26 11:22:14 crc kubenswrapper[4622]: I1126 11:22:14.981086 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" event={"ID":"1b655923-a88c-49ea-938e-bff6999e007a","Type":"ContainerStarted","Data":"b4e6995a31bd8d7573dea53fcfe842ba8690f655c624e0b3382c539234883227"} Nov 26 11:22:14 crc kubenswrapper[4622]: I1126 11:22:14.982415 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" Nov 26 11:22:15 crc kubenswrapper[4622]: I1126 11:22:15.000246 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" podStartSLOduration=1.654834651 podStartE2EDuration="6.00022565s" podCreationTimestamp="2025-11-26 11:22:09 +0000 UTC" firstStartedPulling="2025-11-26 11:22:10.232837684 +0000 UTC m=+689.824049206" lastFinishedPulling="2025-11-26 11:22:14.578228683 +0000 UTC m=+694.169440205" observedRunningTime="2025-11-26 11:22:14.997477886 +0000 UTC m=+694.588689407" watchObservedRunningTime="2025-11-26 11:22:15.00022565 +0000 UTC m=+694.591437172" Nov 26 11:22:15 crc kubenswrapper[4622]: I1126 11:22:15.199371 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:22:15 crc kubenswrapper[4622]: I1126 11:22:15.199435 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:22:29 crc kubenswrapper[4622]: I1126 11:22:29.822055 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5765679bfc-p7wlr" Nov 26 11:22:40 crc kubenswrapper[4622]: I1126 11:22:40.905787 4622 scope.go:117] "RemoveContainer" containerID="2232ed986a4a1b10e26df6e28cd0f3fcb55fa81173f358f47511d61cc5e4449d" Nov 26 11:22:45 crc kubenswrapper[4622]: I1126 11:22:45.199096 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:22:45 crc kubenswrapper[4622]: I1126 11:22:45.199465 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:22:49 crc kubenswrapper[4622]: I1126 11:22:49.693524 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-574b75b4b5-vqg2q" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.285694 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-fts9w"] Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.288026 4622 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.290285 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2"] Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.293006 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.294586 4622 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.297778 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.298529 4622 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-dtfwt" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.302592 4622 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.324240 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2"] Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.362335 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-wb55x"] Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.363486 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-wb55x" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.365790 4622 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.368677 4622 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.368691 4622 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-mf2j4" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.368913 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.377733 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmt86\" (UniqueName: \"kubernetes.io/projected/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-kube-api-access-vmt86\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.377775 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-reloader\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.377800 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-metrics-certs\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.377826 4622 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n2k8\" (UniqueName: \"kubernetes.io/projected/ca0f25fe-37e0-4346-9fcc-96152cb67a78-kube-api-access-8n2k8\") pod \"frr-k8s-webhook-server-6998585d5-gtbw2\" (UID: \"ca0f25fe-37e0-4346-9fcc-96152cb67a78\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.377898 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-frr-conf\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.377924 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca0f25fe-37e0-4346-9fcc-96152cb67a78-cert\") pod \"frr-k8s-webhook-server-6998585d5-gtbw2\" (UID: \"ca0f25fe-37e0-4346-9fcc-96152cb67a78\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.377968 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-frr-startup\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.377989 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-metrics\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.378016 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-frr-sockets\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.379128 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6c7b4b5f48-t4v8v"] Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.381432 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-t4v8v" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.391229 4622 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.413001 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-t4v8v"] Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.479726 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/501feec2-41da-496a-917e-7c032d616625-cert\") pod \"controller-6c7b4b5f48-t4v8v\" (UID: \"501feec2-41da-496a-917e-7c032d616625\") " pod="metallb-system/controller-6c7b4b5f48-t4v8v" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.479781 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9c03ee24-a489-4550-9f60-c1839cf29670-memberlist\") pod \"speaker-wb55x\" (UID: \"9c03ee24-a489-4550-9f60-c1839cf29670\") " pod="metallb-system/speaker-wb55x" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.479831 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-frr-conf\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.479914 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca0f25fe-37e0-4346-9fcc-96152cb67a78-cert\") pod \"frr-k8s-webhook-server-6998585d5-gtbw2\" (UID: \"ca0f25fe-37e0-4346-9fcc-96152cb67a78\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.479988 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/9c03ee24-a489-4550-9f60-c1839cf29670-metallb-excludel2\") pod \"speaker-wb55x\" (UID: \"9c03ee24-a489-4550-9f60-c1839cf29670\") " pod="metallb-system/speaker-wb55x" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.480010 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-frr-startup\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.480027 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-metrics\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.480066 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xlm5\" (UniqueName: \"kubernetes.io/projected/501feec2-41da-496a-917e-7c032d616625-kube-api-access-2xlm5\") pod \"controller-6c7b4b5f48-t4v8v\" (UID: \"501feec2-41da-496a-917e-7c032d616625\") " pod="metallb-system/controller-6c7b4b5f48-t4v8v" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.480100 4622 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-frr-sockets\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.480141 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/501feec2-41da-496a-917e-7c032d616625-metrics-certs\") pod \"controller-6c7b4b5f48-t4v8v\" (UID: \"501feec2-41da-496a-917e-7c032d616625\") " pod="metallb-system/controller-6c7b4b5f48-t4v8v" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.480194 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9c03ee24-a489-4550-9f60-c1839cf29670-metrics-certs\") pod \"speaker-wb55x\" (UID: \"9c03ee24-a489-4550-9f60-c1839cf29670\") " pod="metallb-system/speaker-wb55x" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.480216 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmt86\" (UniqueName: \"kubernetes.io/projected/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-kube-api-access-vmt86\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.480235 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zvtv\" (UniqueName: \"kubernetes.io/projected/9c03ee24-a489-4550-9f60-c1839cf29670-kube-api-access-5zvtv\") pod \"speaker-wb55x\" (UID: \"9c03ee24-a489-4550-9f60-c1839cf29670\") " pod="metallb-system/speaker-wb55x" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.480242 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-frr-conf\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.480267 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-reloader\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.480303 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-metrics-certs\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.480330 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n2k8\" (UniqueName: \"kubernetes.io/projected/ca0f25fe-37e0-4346-9fcc-96152cb67a78-kube-api-access-8n2k8\") pod \"frr-k8s-webhook-server-6998585d5-gtbw2\" (UID: \"ca0f25fe-37e0-4346-9fcc-96152cb67a78\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.480440 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-frr-sockets\") pod 
\"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.481180 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-reloader\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: E1126 11:22:50.481300 4622 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Nov 26 11:22:50 crc kubenswrapper[4622]: E1126 11:22:50.481381 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-metrics-certs podName:a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59 nodeName:}" failed. No retries permitted until 2025-11-26 11:22:50.981359347 +0000 UTC m=+730.572570868 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-metrics-certs") pod "frr-k8s-fts9w" (UID: "a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59") : secret "frr-k8s-certs-secret" not found Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.481964 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-metrics\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.482041 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-frr-startup\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.489166 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca0f25fe-37e0-4346-9fcc-96152cb67a78-cert\") pod \"frr-k8s-webhook-server-6998585d5-gtbw2\" (UID: \"ca0f25fe-37e0-4346-9fcc-96152cb67a78\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.502353 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmt86\" (UniqueName: \"kubernetes.io/projected/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-kube-api-access-vmt86\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.505068 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n2k8\" (UniqueName: \"kubernetes.io/projected/ca0f25fe-37e0-4346-9fcc-96152cb67a78-kube-api-access-8n2k8\") pod \"frr-k8s-webhook-server-6998585d5-gtbw2\" (UID: \"ca0f25fe-37e0-4346-9fcc-96152cb67a78\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.581598 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xlm5\" (UniqueName: \"kubernetes.io/projected/501feec2-41da-496a-917e-7c032d616625-kube-api-access-2xlm5\") pod \"controller-6c7b4b5f48-t4v8v\" (UID: \"501feec2-41da-496a-917e-7c032d616625\") " pod="metallb-system/controller-6c7b4b5f48-t4v8v" Nov 26 11:22:50 crc 
kubenswrapper[4622]: I1126 11:22:50.581882 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/501feec2-41da-496a-917e-7c032d616625-metrics-certs\") pod \"controller-6c7b4b5f48-t4v8v\" (UID: \"501feec2-41da-496a-917e-7c032d616625\") " pod="metallb-system/controller-6c7b4b5f48-t4v8v" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.582023 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9c03ee24-a489-4550-9f60-c1839cf29670-metrics-certs\") pod \"speaker-wb55x\" (UID: \"9c03ee24-a489-4550-9f60-c1839cf29670\") " pod="metallb-system/speaker-wb55x" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.582113 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zvtv\" (UniqueName: \"kubernetes.io/projected/9c03ee24-a489-4550-9f60-c1839cf29670-kube-api-access-5zvtv\") pod \"speaker-wb55x\" (UID: \"9c03ee24-a489-4550-9f60-c1839cf29670\") " pod="metallb-system/speaker-wb55x" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.582253 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/501feec2-41da-496a-917e-7c032d616625-cert\") pod \"controller-6c7b4b5f48-t4v8v\" (UID: \"501feec2-41da-496a-917e-7c032d616625\") " pod="metallb-system/controller-6c7b4b5f48-t4v8v" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.582334 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9c03ee24-a489-4550-9f60-c1839cf29670-memberlist\") pod \"speaker-wb55x\" (UID: \"9c03ee24-a489-4550-9f60-c1839cf29670\") " pod="metallb-system/speaker-wb55x" Nov 26 11:22:50 crc kubenswrapper[4622]: E1126 11:22:50.582153 4622 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Nov 26 11:22:50 crc kubenswrapper[4622]: E1126 11:22:50.582490 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9c03ee24-a489-4550-9f60-c1839cf29670-metrics-certs podName:9c03ee24-a489-4550-9f60-c1839cf29670 nodeName:}" failed. No retries permitted until 2025-11-26 11:22:51.082462838 +0000 UTC m=+730.673674360 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9c03ee24-a489-4550-9f60-c1839cf29670-metrics-certs") pod "speaker-wb55x" (UID: "9c03ee24-a489-4550-9f60-c1839cf29670") : secret "speaker-certs-secret" not found Nov 26 11:22:50 crc kubenswrapper[4622]: E1126 11:22:50.582575 4622 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 26 11:22:50 crc kubenswrapper[4622]: E1126 11:22:50.582631 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9c03ee24-a489-4550-9f60-c1839cf29670-memberlist podName:9c03ee24-a489-4550-9f60-c1839cf29670 nodeName:}" failed. No retries permitted until 2025-11-26 11:22:51.082618411 +0000 UTC m=+730.673829933 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/9c03ee24-a489-4550-9f60-c1839cf29670-memberlist") pod "speaker-wb55x" (UID: "9c03ee24-a489-4550-9f60-c1839cf29670") : secret "metallb-memberlist" not found Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.582725 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/9c03ee24-a489-4550-9f60-c1839cf29670-metallb-excludel2\") pod \"speaker-wb55x\" (UID: \"9c03ee24-a489-4550-9f60-c1839cf29670\") " pod="metallb-system/speaker-wb55x" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.583459 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/9c03ee24-a489-4550-9f60-c1839cf29670-metallb-excludel2\") pod \"speaker-wb55x\" (UID: \"9c03ee24-a489-4550-9f60-c1839cf29670\") " pod="metallb-system/speaker-wb55x" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.586813 4622 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.586969 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/501feec2-41da-496a-917e-7c032d616625-metrics-certs\") pod \"controller-6c7b4b5f48-t4v8v\" (UID: \"501feec2-41da-496a-917e-7c032d616625\") " pod="metallb-system/controller-6c7b4b5f48-t4v8v" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.596308 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/501feec2-41da-496a-917e-7c032d616625-cert\") pod \"controller-6c7b4b5f48-t4v8v\" (UID: \"501feec2-41da-496a-917e-7c032d616625\") " pod="metallb-system/controller-6c7b4b5f48-t4v8v" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.599341 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zvtv\" (UniqueName: \"kubernetes.io/projected/9c03ee24-a489-4550-9f60-c1839cf29670-kube-api-access-5zvtv\") pod \"speaker-wb55x\" (UID: \"9c03ee24-a489-4550-9f60-c1839cf29670\") " pod="metallb-system/speaker-wb55x" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.599932 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xlm5\" (UniqueName: \"kubernetes.io/projected/501feec2-41da-496a-917e-7c032d616625-kube-api-access-2xlm5\") pod \"controller-6c7b4b5f48-t4v8v\" (UID: \"501feec2-41da-496a-917e-7c032d616625\") " pod="metallb-system/controller-6c7b4b5f48-t4v8v" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.636784 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.696384 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-t4v8v" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.813670 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2"] Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.988490 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-metrics-certs\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:50 crc kubenswrapper[4622]: I1126 11:22:50.992821 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59-metrics-certs\") pod \"frr-k8s-fts9w\" (UID: \"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59\") " pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:51 crc kubenswrapper[4622]: I1126 11:22:51.064373 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-t4v8v"] Nov 26 11:22:51 crc kubenswrapper[4622]: W1126 11:22:51.069269 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod501feec2_41da_496a_917e_7c032d616625.slice/crio-f61b6529b33a40e8095272093db632f75ec16d4d505e9a4c1e25f5c00cdf81f0 WatchSource:0}: Error finding container f61b6529b33a40e8095272093db632f75ec16d4d505e9a4c1e25f5c00cdf81f0: Status 404 returned error can't find the container with id f61b6529b33a40e8095272093db632f75ec16d4d505e9a4c1e25f5c00cdf81f0 Nov 26 11:22:51 crc kubenswrapper[4622]: I1126 11:22:51.090048 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9c03ee24-a489-4550-9f60-c1839cf29670-memberlist\") pod \"speaker-wb55x\" (UID: \"9c03ee24-a489-4550-9f60-c1839cf29670\") " pod="metallb-system/speaker-wb55x" Nov 26 11:22:51 crc kubenswrapper[4622]: I1126 11:22:51.090164 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9c03ee24-a489-4550-9f60-c1839cf29670-metrics-certs\") pod \"speaker-wb55x\" (UID: \"9c03ee24-a489-4550-9f60-c1839cf29670\") " pod="metallb-system/speaker-wb55x" Nov 26 11:22:51 crc kubenswrapper[4622]: E1126 11:22:51.090178 4622 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 26 11:22:51 crc kubenswrapper[4622]: E1126 11:22:51.090241 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9c03ee24-a489-4550-9f60-c1839cf29670-memberlist podName:9c03ee24-a489-4550-9f60-c1839cf29670 nodeName:}" failed. No retries permitted until 2025-11-26 11:22:52.090225322 +0000 UTC m=+731.681436845 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/9c03ee24-a489-4550-9f60-c1839cf29670-memberlist") pod "speaker-wb55x" (UID: "9c03ee24-a489-4550-9f60-c1839cf29670") : secret "metallb-memberlist" not found Nov 26 11:22:51 crc kubenswrapper[4622]: I1126 11:22:51.092905 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9c03ee24-a489-4550-9f60-c1839cf29670-metrics-certs\") pod \"speaker-wb55x\" (UID: \"9c03ee24-a489-4550-9f60-c1839cf29670\") " pod="metallb-system/speaker-wb55x" Nov 26 11:22:51 crc kubenswrapper[4622]: I1126 11:22:51.160412 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2" event={"ID":"ca0f25fe-37e0-4346-9fcc-96152cb67a78","Type":"ContainerStarted","Data":"81bb473e39a1323fdc512a08511fadc2a1acad12cc57ce4c7eaa62587fa854f8"} Nov 26 11:22:51 crc kubenswrapper[4622]: I1126 11:22:51.162381 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-t4v8v" event={"ID":"501feec2-41da-496a-917e-7c032d616625","Type":"ContainerStarted","Data":"f61b6529b33a40e8095272093db632f75ec16d4d505e9a4c1e25f5c00cdf81f0"} Nov 26 11:22:51 crc kubenswrapper[4622]: I1126 11:22:51.230132 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-fts9w" Nov 26 11:22:52 crc kubenswrapper[4622]: I1126 11:22:52.108555 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9c03ee24-a489-4550-9f60-c1839cf29670-memberlist\") pod \"speaker-wb55x\" (UID: \"9c03ee24-a489-4550-9f60-c1839cf29670\") " pod="metallb-system/speaker-wb55x" Nov 26 11:22:52 crc kubenswrapper[4622]: I1126 11:22:52.115355 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9c03ee24-a489-4550-9f60-c1839cf29670-memberlist\") pod \"speaker-wb55x\" (UID: \"9c03ee24-a489-4550-9f60-c1839cf29670\") " pod="metallb-system/speaker-wb55x" Nov 26 11:22:52 crc kubenswrapper[4622]: I1126 11:22:52.172948 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fts9w" event={"ID":"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59","Type":"ContainerStarted","Data":"02e2695223999358fba05fb709909acc6636fb889f4225ffe8f1a0f36d51a43f"} Nov 26 11:22:52 crc kubenswrapper[4622]: I1126 11:22:52.175009 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-t4v8v" event={"ID":"501feec2-41da-496a-917e-7c032d616625","Type":"ContainerStarted","Data":"ee47affe6d32fbe5b1fecc6dbd8e3564d28b9196b331a64c269336fc1be61eb9"} Nov 26 11:22:52 crc kubenswrapper[4622]: I1126 11:22:52.175056 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-t4v8v" event={"ID":"501feec2-41da-496a-917e-7c032d616625","Type":"ContainerStarted","Data":"13db80199e9aa1ad01b8f254d0ebfcabc5d37bb07cef0fd4d8d479f01df78253"} Nov 26 11:22:52 crc kubenswrapper[4622]: I1126 11:22:52.175349 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6c7b4b5f48-t4v8v" Nov 26 11:22:52 crc kubenswrapper[4622]: I1126 11:22:52.181258 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-wb55x" Nov 26 11:22:52 crc kubenswrapper[4622]: W1126 11:22:52.198477 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c03ee24_a489_4550_9f60_c1839cf29670.slice/crio-75d914b4a9e1b98b6cf9e05a233eeee02d28a67936ff81aaa5cecbc65994d9a2 WatchSource:0}: Error finding container 75d914b4a9e1b98b6cf9e05a233eeee02d28a67936ff81aaa5cecbc65994d9a2: Status 404 returned error can't find the container with id 75d914b4a9e1b98b6cf9e05a233eeee02d28a67936ff81aaa5cecbc65994d9a2 Nov 26 11:22:53 crc kubenswrapper[4622]: I1126 11:22:53.184374 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-wb55x" event={"ID":"9c03ee24-a489-4550-9f60-c1839cf29670","Type":"ContainerStarted","Data":"2cbed90e21d673300027dcd7c760c5e7380cca5111a52834080613f192779549"} Nov 26 11:22:53 crc kubenswrapper[4622]: I1126 11:22:53.184638 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-wb55x" event={"ID":"9c03ee24-a489-4550-9f60-c1839cf29670","Type":"ContainerStarted","Data":"d51b479580a16d9403518cb8897f391e38050af56eb4815b451dc4f6bbfde15f"} Nov 26 11:22:53 crc kubenswrapper[4622]: I1126 11:22:53.184652 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-wb55x" event={"ID":"9c03ee24-a489-4550-9f60-c1839cf29670","Type":"ContainerStarted","Data":"75d914b4a9e1b98b6cf9e05a233eeee02d28a67936ff81aaa5cecbc65994d9a2"} Nov 26 11:22:53 crc kubenswrapper[4622]: I1126 11:22:53.185149 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-wb55x" Nov 26 11:22:53 crc kubenswrapper[4622]: I1126 11:22:53.204811 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6c7b4b5f48-t4v8v" podStartSLOduration=3.20479372 podStartE2EDuration="3.20479372s" podCreationTimestamp="2025-11-26 11:22:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:22:52.189547517 +0000 UTC m=+731.780759039" watchObservedRunningTime="2025-11-26 11:22:53.20479372 +0000 UTC m=+732.796005242" Nov 26 11:22:53 crc kubenswrapper[4622]: I1126 11:22:53.206483 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-wb55x" podStartSLOduration=3.206469833 podStartE2EDuration="3.206469833s" podCreationTimestamp="2025-11-26 11:22:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:22:53.203879345 +0000 UTC m=+732.795090867" watchObservedRunningTime="2025-11-26 11:22:53.206469833 +0000 UTC m=+732.797681355" Nov 26 11:22:57 crc kubenswrapper[4622]: I1126 11:22:57.206917 4622 generic.go:334] "Generic (PLEG): container finished" podID="a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59" containerID="b8430a0a29199997451565ddb2105c17fa4941b8944b173719334d042c94b444" exitCode=0 Nov 26 11:22:57 crc kubenswrapper[4622]: I1126 11:22:57.207033 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fts9w" event={"ID":"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59","Type":"ContainerDied","Data":"b8430a0a29199997451565ddb2105c17fa4941b8944b173719334d042c94b444"} Nov 26 11:22:57 crc kubenswrapper[4622]: I1126 11:22:57.209684 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2" 
event={"ID":"ca0f25fe-37e0-4346-9fcc-96152cb67a78","Type":"ContainerStarted","Data":"8dc0cfa336a209f7c8e2db7a85d7f4dadb509b0c6a02a7274e65bf2ce666dd47"} Nov 26 11:22:57 crc kubenswrapper[4622]: I1126 11:22:57.209852 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2" Nov 26 11:22:58 crc kubenswrapper[4622]: I1126 11:22:58.216792 4622 generic.go:334] "Generic (PLEG): container finished" podID="a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59" containerID="3f5a1bef771bafd06085321ae67519abf56391fe39e9d5c4dc46f95f0ebf6276" exitCode=0 Nov 26 11:22:58 crc kubenswrapper[4622]: I1126 11:22:58.216878 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fts9w" event={"ID":"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59","Type":"ContainerDied","Data":"3f5a1bef771bafd06085321ae67519abf56391fe39e9d5c4dc46f95f0ebf6276"} Nov 26 11:22:58 crc kubenswrapper[4622]: I1126 11:22:58.233007 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2" podStartSLOduration=2.072006107 podStartE2EDuration="8.232989435s" podCreationTimestamp="2025-11-26 11:22:50 +0000 UTC" firstStartedPulling="2025-11-26 11:22:50.823223141 +0000 UTC m=+730.414434663" lastFinishedPulling="2025-11-26 11:22:56.984206469 +0000 UTC m=+736.575417991" observedRunningTime="2025-11-26 11:22:57.2425549 +0000 UTC m=+736.833766422" watchObservedRunningTime="2025-11-26 11:22:58.232989435 +0000 UTC m=+737.824200957" Nov 26 11:22:59 crc kubenswrapper[4622]: I1126 11:22:59.224846 4622 generic.go:334] "Generic (PLEG): container finished" podID="a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59" containerID="bf9b43f51765ab5b467790c53da2f912f0924d562f874a3d56feb2a187b213a9" exitCode=0 Nov 26 11:22:59 crc kubenswrapper[4622]: I1126 11:22:59.224920 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fts9w" event={"ID":"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59","Type":"ContainerDied","Data":"bf9b43f51765ab5b467790c53da2f912f0924d562f874a3d56feb2a187b213a9"} Nov 26 11:23:00 crc kubenswrapper[4622]: I1126 11:23:00.234944 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fts9w" event={"ID":"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59","Type":"ContainerStarted","Data":"505423bbca840287cacdd1eee38804b023670c11de9b3d6074523792ef2b4478"} Nov 26 11:23:00 crc kubenswrapper[4622]: I1126 11:23:00.235211 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fts9w" event={"ID":"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59","Type":"ContainerStarted","Data":"fb07468143ebfdc3495e270b9a27b6d8c2f6d25d0146c74d0ddca7f20059ca14"} Nov 26 11:23:00 crc kubenswrapper[4622]: I1126 11:23:00.235232 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-fts9w" Nov 26 11:23:00 crc kubenswrapper[4622]: I1126 11:23:00.235245 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fts9w" event={"ID":"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59","Type":"ContainerStarted","Data":"36a548af1991ee6aef2bc80dee0c7b38a1045a1eb33ad41aca8db114ac3ad691"} Nov 26 11:23:00 crc kubenswrapper[4622]: I1126 11:23:00.235257 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fts9w" event={"ID":"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59","Type":"ContainerStarted","Data":"ed307eb0d3373cfcfafb5f9c3d112476fa1db327e2f63e545f475958bb5cc1ff"} Nov 26 11:23:00 crc kubenswrapper[4622]: I1126 11:23:00.235266 4622 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fts9w" event={"ID":"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59","Type":"ContainerStarted","Data":"ded410de33376bd0e847292c97d359403701807c8497b4753a137ddcf14cdbc8"} Nov 26 11:23:00 crc kubenswrapper[4622]: I1126 11:23:00.235274 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fts9w" event={"ID":"a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59","Type":"ContainerStarted","Data":"b7fc51711157ada473a74f101f866bb76548ada8c45418ed510fa389c585bae7"} Nov 26 11:23:00 crc kubenswrapper[4622]: I1126 11:23:00.253773 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-fts9w" podStartSLOduration=4.539846557 podStartE2EDuration="10.253756079s" podCreationTimestamp="2025-11-26 11:22:50 +0000 UTC" firstStartedPulling="2025-11-26 11:22:51.290065509 +0000 UTC m=+730.881277032" lastFinishedPulling="2025-11-26 11:22:57.003975032 +0000 UTC m=+736.595186554" observedRunningTime="2025-11-26 11:23:00.252841212 +0000 UTC m=+739.844052735" watchObservedRunningTime="2025-11-26 11:23:00.253756079 +0000 UTC m=+739.844967601" Nov 26 11:23:01 crc kubenswrapper[4622]: I1126 11:23:01.231052 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-fts9w" Nov 26 11:23:01 crc kubenswrapper[4622]: I1126 11:23:01.266029 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-fts9w" Nov 26 11:23:02 crc kubenswrapper[4622]: I1126 11:23:02.185708 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-wb55x" Nov 26 11:23:04 crc kubenswrapper[4622]: I1126 11:23:04.388849 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-c78gf"] Nov 26 11:23:04 crc kubenswrapper[4622]: I1126 11:23:04.389971 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-c78gf" Nov 26 11:23:04 crc kubenswrapper[4622]: I1126 11:23:04.396576 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-c78gf"] Nov 26 11:23:04 crc kubenswrapper[4622]: I1126 11:23:04.399047 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Nov 26 11:23:04 crc kubenswrapper[4622]: I1126 11:23:04.399063 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-j7tfn" Nov 26 11:23:04 crc kubenswrapper[4622]: I1126 11:23:04.399055 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Nov 26 11:23:04 crc kubenswrapper[4622]: I1126 11:23:04.492631 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vffw2\" (UniqueName: \"kubernetes.io/projected/235ef1ac-3607-4dbd-895c-4033a01fcda6-kube-api-access-vffw2\") pod \"openstack-operator-index-c78gf\" (UID: \"235ef1ac-3607-4dbd-895c-4033a01fcda6\") " pod="openstack-operators/openstack-operator-index-c78gf" Nov 26 11:23:04 crc kubenswrapper[4622]: I1126 11:23:04.594667 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vffw2\" (UniqueName: \"kubernetes.io/projected/235ef1ac-3607-4dbd-895c-4033a01fcda6-kube-api-access-vffw2\") pod \"openstack-operator-index-c78gf\" (UID: \"235ef1ac-3607-4dbd-895c-4033a01fcda6\") " pod="openstack-operators/openstack-operator-index-c78gf" Nov 26 11:23:04 crc kubenswrapper[4622]: I1126 11:23:04.613922 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vffw2\" (UniqueName: \"kubernetes.io/projected/235ef1ac-3607-4dbd-895c-4033a01fcda6-kube-api-access-vffw2\") pod \"openstack-operator-index-c78gf\" (UID: \"235ef1ac-3607-4dbd-895c-4033a01fcda6\") " pod="openstack-operators/openstack-operator-index-c78gf" Nov 26 11:23:04 crc kubenswrapper[4622]: I1126 11:23:04.717987 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-c78gf" Nov 26 11:23:05 crc kubenswrapper[4622]: I1126 11:23:05.085742 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-c78gf"] Nov 26 11:23:05 crc kubenswrapper[4622]: W1126 11:23:05.090087 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod235ef1ac_3607_4dbd_895c_4033a01fcda6.slice/crio-d8f97e506a8275ec0050cfa87f2adeb6d7a09af2d24fcc79ddf2745bd8636414 WatchSource:0}: Error finding container d8f97e506a8275ec0050cfa87f2adeb6d7a09af2d24fcc79ddf2745bd8636414: Status 404 returned error can't find the container with id d8f97e506a8275ec0050cfa87f2adeb6d7a09af2d24fcc79ddf2745bd8636414 Nov 26 11:23:05 crc kubenswrapper[4622]: I1126 11:23:05.260305 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-c78gf" event={"ID":"235ef1ac-3607-4dbd-895c-4033a01fcda6","Type":"ContainerStarted","Data":"d8f97e506a8275ec0050cfa87f2adeb6d7a09af2d24fcc79ddf2745bd8636414"} Nov 26 11:23:07 crc kubenswrapper[4622]: I1126 11:23:07.271198 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-c78gf" event={"ID":"235ef1ac-3607-4dbd-895c-4033a01fcda6","Type":"ContainerStarted","Data":"a1b78695223966c6f25ad8aeef83a1bb4af4be28a8300ad5a1dc8a8e8ae3a5c7"} Nov 26 11:23:07 crc kubenswrapper[4622]: I1126 11:23:07.293271 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-c78gf" podStartSLOduration=1.758106985 podStartE2EDuration="3.293252976s" podCreationTimestamp="2025-11-26 11:23:04 +0000 UTC" firstStartedPulling="2025-11-26 11:23:05.092273792 +0000 UTC m=+744.683485314" lastFinishedPulling="2025-11-26 11:23:06.627419783 +0000 UTC m=+746.218631305" observedRunningTime="2025-11-26 11:23:07.288561864 +0000 UTC m=+746.879773386" watchObservedRunningTime="2025-11-26 11:23:07.293252976 +0000 UTC m=+746.884464498" Nov 26 11:23:07 crc kubenswrapper[4622]: I1126 11:23:07.688469 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-c78gf"] Nov 26 11:23:08 crc kubenswrapper[4622]: I1126 11:23:08.297761 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-nctx2"] Nov 26 11:23:08 crc kubenswrapper[4622]: I1126 11:23:08.298570 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-nctx2" Nov 26 11:23:08 crc kubenswrapper[4622]: I1126 11:23:08.304344 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-nctx2"] Nov 26 11:23:08 crc kubenswrapper[4622]: I1126 11:23:08.350978 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhtfg\" (UniqueName: \"kubernetes.io/projected/9aa64d20-32c0-4ded-856e-a5f4b01cef1e-kube-api-access-qhtfg\") pod \"openstack-operator-index-nctx2\" (UID: \"9aa64d20-32c0-4ded-856e-a5f4b01cef1e\") " pod="openstack-operators/openstack-operator-index-nctx2" Nov 26 11:23:08 crc kubenswrapper[4622]: I1126 11:23:08.452529 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhtfg\" (UniqueName: \"kubernetes.io/projected/9aa64d20-32c0-4ded-856e-a5f4b01cef1e-kube-api-access-qhtfg\") pod \"openstack-operator-index-nctx2\" (UID: \"9aa64d20-32c0-4ded-856e-a5f4b01cef1e\") " pod="openstack-operators/openstack-operator-index-nctx2" Nov 26 11:23:08 crc kubenswrapper[4622]: I1126 11:23:08.471089 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhtfg\" (UniqueName: \"kubernetes.io/projected/9aa64d20-32c0-4ded-856e-a5f4b01cef1e-kube-api-access-qhtfg\") pod \"openstack-operator-index-nctx2\" (UID: \"9aa64d20-32c0-4ded-856e-a5f4b01cef1e\") " pod="openstack-operators/openstack-operator-index-nctx2" Nov 26 11:23:08 crc kubenswrapper[4622]: I1126 11:23:08.616892 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-nctx2" Nov 26 11:23:08 crc kubenswrapper[4622]: I1126 11:23:08.775255 4622 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 26 11:23:09 crc kubenswrapper[4622]: I1126 11:23:09.002688 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-nctx2"] Nov 26 11:23:09 crc kubenswrapper[4622]: I1126 11:23:09.281926 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-nctx2" event={"ID":"9aa64d20-32c0-4ded-856e-a5f4b01cef1e","Type":"ContainerStarted","Data":"abc99c36bc065092f5bef60d753e191e3dffda7ef536417e89d2a442a439623f"} Nov 26 11:23:09 crc kubenswrapper[4622]: I1126 11:23:09.282768 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-c78gf" podUID="235ef1ac-3607-4dbd-895c-4033a01fcda6" containerName="registry-server" containerID="cri-o://a1b78695223966c6f25ad8aeef83a1bb4af4be28a8300ad5a1dc8a8e8ae3a5c7" gracePeriod=2 Nov 26 11:23:09 crc kubenswrapper[4622]: I1126 11:23:09.584875 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-c78gf" Nov 26 11:23:09 crc kubenswrapper[4622]: I1126 11:23:09.670165 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vffw2\" (UniqueName: \"kubernetes.io/projected/235ef1ac-3607-4dbd-895c-4033a01fcda6-kube-api-access-vffw2\") pod \"235ef1ac-3607-4dbd-895c-4033a01fcda6\" (UID: \"235ef1ac-3607-4dbd-895c-4033a01fcda6\") " Nov 26 11:23:09 crc kubenswrapper[4622]: I1126 11:23:09.675266 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/235ef1ac-3607-4dbd-895c-4033a01fcda6-kube-api-access-vffw2" (OuterVolumeSpecName: "kube-api-access-vffw2") pod "235ef1ac-3607-4dbd-895c-4033a01fcda6" (UID: "235ef1ac-3607-4dbd-895c-4033a01fcda6"). InnerVolumeSpecName "kube-api-access-vffw2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:23:09 crc kubenswrapper[4622]: I1126 11:23:09.772158 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vffw2\" (UniqueName: \"kubernetes.io/projected/235ef1ac-3607-4dbd-895c-4033a01fcda6-kube-api-access-vffw2\") on node \"crc\" DevicePath \"\"" Nov 26 11:23:10 crc kubenswrapper[4622]: I1126 11:23:10.288819 4622 generic.go:334] "Generic (PLEG): container finished" podID="235ef1ac-3607-4dbd-895c-4033a01fcda6" containerID="a1b78695223966c6f25ad8aeef83a1bb4af4be28a8300ad5a1dc8a8e8ae3a5c7" exitCode=0 Nov 26 11:23:10 crc kubenswrapper[4622]: I1126 11:23:10.288906 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-c78gf" Nov 26 11:23:10 crc kubenswrapper[4622]: I1126 11:23:10.288945 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-c78gf" event={"ID":"235ef1ac-3607-4dbd-895c-4033a01fcda6","Type":"ContainerDied","Data":"a1b78695223966c6f25ad8aeef83a1bb4af4be28a8300ad5a1dc8a8e8ae3a5c7"} Nov 26 11:23:10 crc kubenswrapper[4622]: I1126 11:23:10.288993 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-c78gf" event={"ID":"235ef1ac-3607-4dbd-895c-4033a01fcda6","Type":"ContainerDied","Data":"d8f97e506a8275ec0050cfa87f2adeb6d7a09af2d24fcc79ddf2745bd8636414"} Nov 26 11:23:10 crc kubenswrapper[4622]: I1126 11:23:10.289013 4622 scope.go:117] "RemoveContainer" containerID="a1b78695223966c6f25ad8aeef83a1bb4af4be28a8300ad5a1dc8a8e8ae3a5c7" Nov 26 11:23:10 crc kubenswrapper[4622]: I1126 11:23:10.290482 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-nctx2" event={"ID":"9aa64d20-32c0-4ded-856e-a5f4b01cef1e","Type":"ContainerStarted","Data":"5e10ca95dffb629d1be449f7813453674aebf4a8dcd7c26ab58cee6519d0cb8d"} Nov 26 11:23:10 crc kubenswrapper[4622]: I1126 11:23:10.307576 4622 scope.go:117] "RemoveContainer" containerID="a1b78695223966c6f25ad8aeef83a1bb4af4be28a8300ad5a1dc8a8e8ae3a5c7" Nov 26 11:23:10 crc kubenswrapper[4622]: E1126 11:23:10.308993 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1b78695223966c6f25ad8aeef83a1bb4af4be28a8300ad5a1dc8a8e8ae3a5c7\": container with ID starting with a1b78695223966c6f25ad8aeef83a1bb4af4be28a8300ad5a1dc8a8e8ae3a5c7 not found: ID does not exist" containerID="a1b78695223966c6f25ad8aeef83a1bb4af4be28a8300ad5a1dc8a8e8ae3a5c7" Nov 26 11:23:10 crc kubenswrapper[4622]: I1126 11:23:10.309053 4622 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1b78695223966c6f25ad8aeef83a1bb4af4be28a8300ad5a1dc8a8e8ae3a5c7"} err="failed to get container status \"a1b78695223966c6f25ad8aeef83a1bb4af4be28a8300ad5a1dc8a8e8ae3a5c7\": rpc error: code = NotFound desc = could not find container \"a1b78695223966c6f25ad8aeef83a1bb4af4be28a8300ad5a1dc8a8e8ae3a5c7\": container with ID starting with a1b78695223966c6f25ad8aeef83a1bb4af4be28a8300ad5a1dc8a8e8ae3a5c7 not found: ID does not exist" Nov 26 11:23:10 crc kubenswrapper[4622]: I1126 11:23:10.312023 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-nctx2" podStartSLOduration=1.524026894 podStartE2EDuration="2.311995686s" podCreationTimestamp="2025-11-26 11:23:08 +0000 UTC" firstStartedPulling="2025-11-26 11:23:09.009606836 +0000 UTC m=+748.600818358" lastFinishedPulling="2025-11-26 11:23:09.797575618 +0000 UTC m=+749.388787150" observedRunningTime="2025-11-26 11:23:10.308607262 +0000 UTC m=+749.899818804" watchObservedRunningTime="2025-11-26 11:23:10.311995686 +0000 UTC m=+749.903207208" Nov 26 11:23:10 crc kubenswrapper[4622]: I1126 11:23:10.321303 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-c78gf"] Nov 26 11:23:10 crc kubenswrapper[4622]: I1126 11:23:10.325299 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-c78gf"] Nov 26 11:23:10 crc kubenswrapper[4622]: I1126 11:23:10.641328 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-6998585d5-gtbw2" Nov 26 11:23:10 crc kubenswrapper[4622]: I1126 11:23:10.701000 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6c7b4b5f48-t4v8v" Nov 26 11:23:10 crc kubenswrapper[4622]: I1126 11:23:10.711061 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="235ef1ac-3607-4dbd-895c-4033a01fcda6" path="/var/lib/kubelet/pods/235ef1ac-3607-4dbd-895c-4033a01fcda6/volumes" Nov 26 11:23:11 crc kubenswrapper[4622]: I1126 11:23:11.234562 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-fts9w" Nov 26 11:23:15 crc kubenswrapper[4622]: I1126 11:23:15.199759 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:23:15 crc kubenswrapper[4622]: I1126 11:23:15.200115 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:23:15 crc kubenswrapper[4622]: I1126 11:23:15.200180 4622 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:23:15 crc kubenswrapper[4622]: I1126 11:23:15.200878 4622 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6dbfb41c933dbd29df57c403106270419f480c91a5742a0e6130afbb763abb32"} 
pod="openshift-machine-config-operator/machine-config-daemon-k565w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 26 11:23:15 crc kubenswrapper[4622]: I1126 11:23:15.200959 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" containerID="cri-o://6dbfb41c933dbd29df57c403106270419f480c91a5742a0e6130afbb763abb32" gracePeriod=600 Nov 26 11:23:15 crc kubenswrapper[4622]: I1126 11:23:15.332131 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerID="6dbfb41c933dbd29df57c403106270419f480c91a5742a0e6130afbb763abb32" exitCode=0 Nov 26 11:23:15 crc kubenswrapper[4622]: I1126 11:23:15.332202 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerDied","Data":"6dbfb41c933dbd29df57c403106270419f480c91a5742a0e6130afbb763abb32"} Nov 26 11:23:15 crc kubenswrapper[4622]: I1126 11:23:15.332296 4622 scope.go:117] "RemoveContainer" containerID="766a6366557cfa7283764d83904fccd9ef25d7f74833ab3b92a945f25d2ba5c1" Nov 26 11:23:16 crc kubenswrapper[4622]: I1126 11:23:16.338481 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"d86abbe8387fbedaee53ded7b61aeb7def7973bb53da7bd06534996b89fc85df"} Nov 26 11:23:18 crc kubenswrapper[4622]: I1126 11:23:18.618136 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-nctx2" Nov 26 11:23:18 crc kubenswrapper[4622]: I1126 11:23:18.618438 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-nctx2" Nov 26 11:23:18 crc kubenswrapper[4622]: I1126 11:23:18.641881 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-nctx2" Nov 26 11:23:19 crc kubenswrapper[4622]: I1126 11:23:19.380847 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-nctx2" Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.518636 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz"] Nov 26 11:23:25 crc kubenswrapper[4622]: E1126 11:23:25.519140 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="235ef1ac-3607-4dbd-895c-4033a01fcda6" containerName="registry-server" Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.519157 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="235ef1ac-3607-4dbd-895c-4033a01fcda6" containerName="registry-server" Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.519262 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="235ef1ac-3607-4dbd-895c-4033a01fcda6" containerName="registry-server" Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.520037 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.521817 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-8r4x4" Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.526650 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz"] Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.593483 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fb60b1af-671e-46ac-ad17-3903260919b9-bundle\") pod \"3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz\" (UID: \"fb60b1af-671e-46ac-ad17-3903260919b9\") " pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.593553 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fb60b1af-671e-46ac-ad17-3903260919b9-util\") pod \"3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz\" (UID: \"fb60b1af-671e-46ac-ad17-3903260919b9\") " pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.593674 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7kzc\" (UniqueName: \"kubernetes.io/projected/fb60b1af-671e-46ac-ad17-3903260919b9-kube-api-access-b7kzc\") pod \"3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz\" (UID: \"fb60b1af-671e-46ac-ad17-3903260919b9\") " pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.694140 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fb60b1af-671e-46ac-ad17-3903260919b9-bundle\") pod \"3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz\" (UID: \"fb60b1af-671e-46ac-ad17-3903260919b9\") " pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.694188 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fb60b1af-671e-46ac-ad17-3903260919b9-util\") pod \"3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz\" (UID: \"fb60b1af-671e-46ac-ad17-3903260919b9\") " pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.694273 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7kzc\" (UniqueName: \"kubernetes.io/projected/fb60b1af-671e-46ac-ad17-3903260919b9-kube-api-access-b7kzc\") pod \"3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz\" (UID: \"fb60b1af-671e-46ac-ad17-3903260919b9\") " pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.694588 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/fb60b1af-671e-46ac-ad17-3903260919b9-bundle\") pod \"3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz\" (UID: \"fb60b1af-671e-46ac-ad17-3903260919b9\") " pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.694642 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fb60b1af-671e-46ac-ad17-3903260919b9-util\") pod \"3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz\" (UID: \"fb60b1af-671e-46ac-ad17-3903260919b9\") " pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.709322 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7kzc\" (UniqueName: \"kubernetes.io/projected/fb60b1af-671e-46ac-ad17-3903260919b9-kube-api-access-b7kzc\") pod \"3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz\" (UID: \"fb60b1af-671e-46ac-ad17-3903260919b9\") " pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" Nov 26 11:23:25 crc kubenswrapper[4622]: I1126 11:23:25.834897 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" Nov 26 11:23:26 crc kubenswrapper[4622]: I1126 11:23:26.172282 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz"] Nov 26 11:23:26 crc kubenswrapper[4622]: W1126 11:23:26.174820 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfb60b1af_671e_46ac_ad17_3903260919b9.slice/crio-abca2f4b76d9eb8a6f5859a2f11be3ee2ea1218040fa3abdfd32a38bbd32b699 WatchSource:0}: Error finding container abca2f4b76d9eb8a6f5859a2f11be3ee2ea1218040fa3abdfd32a38bbd32b699: Status 404 returned error can't find the container with id abca2f4b76d9eb8a6f5859a2f11be3ee2ea1218040fa3abdfd32a38bbd32b699 Nov 26 11:23:26 crc kubenswrapper[4622]: I1126 11:23:26.393116 4622 generic.go:334] "Generic (PLEG): container finished" podID="fb60b1af-671e-46ac-ad17-3903260919b9" containerID="cc5a1aaeb6f84a4c31e777f47249a69ba233d9369fe548807387164cf33cb7cd" exitCode=0 Nov 26 11:23:26 crc kubenswrapper[4622]: I1126 11:23:26.393168 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" event={"ID":"fb60b1af-671e-46ac-ad17-3903260919b9","Type":"ContainerDied","Data":"cc5a1aaeb6f84a4c31e777f47249a69ba233d9369fe548807387164cf33cb7cd"} Nov 26 11:23:26 crc kubenswrapper[4622]: I1126 11:23:26.393225 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" event={"ID":"fb60b1af-671e-46ac-ad17-3903260919b9","Type":"ContainerStarted","Data":"abca2f4b76d9eb8a6f5859a2f11be3ee2ea1218040fa3abdfd32a38bbd32b699"} Nov 26 11:23:28 crc kubenswrapper[4622]: I1126 11:23:28.404530 4622 generic.go:334] "Generic (PLEG): container finished" podID="fb60b1af-671e-46ac-ad17-3903260919b9" containerID="8d3b86d94db4f5ab7d41b3431deb9d782b95d59029a12363f62ff97e99e2b8ee" exitCode=0 Nov 26 11:23:28 crc kubenswrapper[4622]: I1126 11:23:28.404613 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" event={"ID":"fb60b1af-671e-46ac-ad17-3903260919b9","Type":"ContainerDied","Data":"8d3b86d94db4f5ab7d41b3431deb9d782b95d59029a12363f62ff97e99e2b8ee"} Nov 26 11:23:29 crc kubenswrapper[4622]: I1126 11:23:29.412873 4622 generic.go:334] "Generic (PLEG): container finished" podID="fb60b1af-671e-46ac-ad17-3903260919b9" containerID="44864dbf668f7936a645b69439647d6940307adde7ec3e1f2daaf1d85db71921" exitCode=0 Nov 26 11:23:29 crc kubenswrapper[4622]: I1126 11:23:29.412995 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" event={"ID":"fb60b1af-671e-46ac-ad17-3903260919b9","Type":"ContainerDied","Data":"44864dbf668f7936a645b69439647d6940307adde7ec3e1f2daaf1d85db71921"} Nov 26 11:23:30 crc kubenswrapper[4622]: I1126 11:23:30.660741 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" Nov 26 11:23:30 crc kubenswrapper[4622]: I1126 11:23:30.761744 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7kzc\" (UniqueName: \"kubernetes.io/projected/fb60b1af-671e-46ac-ad17-3903260919b9-kube-api-access-b7kzc\") pod \"fb60b1af-671e-46ac-ad17-3903260919b9\" (UID: \"fb60b1af-671e-46ac-ad17-3903260919b9\") " Nov 26 11:23:30 crc kubenswrapper[4622]: I1126 11:23:30.761815 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fb60b1af-671e-46ac-ad17-3903260919b9-bundle\") pod \"fb60b1af-671e-46ac-ad17-3903260919b9\" (UID: \"fb60b1af-671e-46ac-ad17-3903260919b9\") " Nov 26 11:23:30 crc kubenswrapper[4622]: I1126 11:23:30.761873 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fb60b1af-671e-46ac-ad17-3903260919b9-util\") pod \"fb60b1af-671e-46ac-ad17-3903260919b9\" (UID: \"fb60b1af-671e-46ac-ad17-3903260919b9\") " Nov 26 11:23:30 crc kubenswrapper[4622]: I1126 11:23:30.762814 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb60b1af-671e-46ac-ad17-3903260919b9-bundle" (OuterVolumeSpecName: "bundle") pod "fb60b1af-671e-46ac-ad17-3903260919b9" (UID: "fb60b1af-671e-46ac-ad17-3903260919b9"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:23:30 crc kubenswrapper[4622]: I1126 11:23:30.767122 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb60b1af-671e-46ac-ad17-3903260919b9-kube-api-access-b7kzc" (OuterVolumeSpecName: "kube-api-access-b7kzc") pod "fb60b1af-671e-46ac-ad17-3903260919b9" (UID: "fb60b1af-671e-46ac-ad17-3903260919b9"). InnerVolumeSpecName "kube-api-access-b7kzc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:23:30 crc kubenswrapper[4622]: I1126 11:23:30.774436 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb60b1af-671e-46ac-ad17-3903260919b9-util" (OuterVolumeSpecName: "util") pod "fb60b1af-671e-46ac-ad17-3903260919b9" (UID: "fb60b1af-671e-46ac-ad17-3903260919b9"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:23:30 crc kubenswrapper[4622]: I1126 11:23:30.863627 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7kzc\" (UniqueName: \"kubernetes.io/projected/fb60b1af-671e-46ac-ad17-3903260919b9-kube-api-access-b7kzc\") on node \"crc\" DevicePath \"\"" Nov 26 11:23:30 crc kubenswrapper[4622]: I1126 11:23:30.863658 4622 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fb60b1af-671e-46ac-ad17-3903260919b9-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:23:30 crc kubenswrapper[4622]: I1126 11:23:30.863670 4622 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fb60b1af-671e-46ac-ad17-3903260919b9-util\") on node \"crc\" DevicePath \"\"" Nov 26 11:23:31 crc kubenswrapper[4622]: I1126 11:23:31.432523 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" event={"ID":"fb60b1af-671e-46ac-ad17-3903260919b9","Type":"ContainerDied","Data":"abca2f4b76d9eb8a6f5859a2f11be3ee2ea1218040fa3abdfd32a38bbd32b699"} Nov 26 11:23:31 crc kubenswrapper[4622]: I1126 11:23:31.432607 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="abca2f4b76d9eb8a6f5859a2f11be3ee2ea1218040fa3abdfd32a38bbd32b699" Nov 26 11:23:31 crc kubenswrapper[4622]: I1126 11:23:31.432649 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz" Nov 26 11:23:37 crc kubenswrapper[4622]: I1126 11:23:37.499314 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8"] Nov 26 11:23:37 crc kubenswrapper[4622]: E1126 11:23:37.499962 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb60b1af-671e-46ac-ad17-3903260919b9" containerName="util" Nov 26 11:23:37 crc kubenswrapper[4622]: I1126 11:23:37.499975 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb60b1af-671e-46ac-ad17-3903260919b9" containerName="util" Nov 26 11:23:37 crc kubenswrapper[4622]: E1126 11:23:37.499986 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb60b1af-671e-46ac-ad17-3903260919b9" containerName="pull" Nov 26 11:23:37 crc kubenswrapper[4622]: I1126 11:23:37.499992 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb60b1af-671e-46ac-ad17-3903260919b9" containerName="pull" Nov 26 11:23:37 crc kubenswrapper[4622]: E1126 11:23:37.500001 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb60b1af-671e-46ac-ad17-3903260919b9" containerName="extract" Nov 26 11:23:37 crc kubenswrapper[4622]: I1126 11:23:37.500006 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb60b1af-671e-46ac-ad17-3903260919b9" containerName="extract" Nov 26 11:23:37 crc kubenswrapper[4622]: I1126 11:23:37.500112 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb60b1af-671e-46ac-ad17-3903260919b9" containerName="extract" Nov 26 11:23:37 crc kubenswrapper[4622]: I1126 11:23:37.500471 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8" Nov 26 11:23:37 crc kubenswrapper[4622]: I1126 11:23:37.502148 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-mjr68" Nov 26 11:23:37 crc kubenswrapper[4622]: I1126 11:23:37.516798 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8"] Nov 26 11:23:37 crc kubenswrapper[4622]: I1126 11:23:37.662383 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfpmw\" (UniqueName: \"kubernetes.io/projected/d29c5844-9fc3-488b-a40e-7846eea50619-kube-api-access-qfpmw\") pod \"openstack-operator-controller-operator-544fb75865-ztbk8\" (UID: \"d29c5844-9fc3-488b-a40e-7846eea50619\") " pod="openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8" Nov 26 11:23:37 crc kubenswrapper[4622]: I1126 11:23:37.763744 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfpmw\" (UniqueName: \"kubernetes.io/projected/d29c5844-9fc3-488b-a40e-7846eea50619-kube-api-access-qfpmw\") pod \"openstack-operator-controller-operator-544fb75865-ztbk8\" (UID: \"d29c5844-9fc3-488b-a40e-7846eea50619\") " pod="openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8" Nov 26 11:23:37 crc kubenswrapper[4622]: I1126 11:23:37.779886 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfpmw\" (UniqueName: \"kubernetes.io/projected/d29c5844-9fc3-488b-a40e-7846eea50619-kube-api-access-qfpmw\") pod \"openstack-operator-controller-operator-544fb75865-ztbk8\" (UID: \"d29c5844-9fc3-488b-a40e-7846eea50619\") " pod="openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8" Nov 26 11:23:37 crc kubenswrapper[4622]: I1126 11:23:37.814312 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8" Nov 26 11:23:38 crc kubenswrapper[4622]: I1126 11:23:38.196098 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8"] Nov 26 11:23:38 crc kubenswrapper[4622]: I1126 11:23:38.470461 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8" event={"ID":"d29c5844-9fc3-488b-a40e-7846eea50619","Type":"ContainerStarted","Data":"aee672e653974fb331b1b5ca6491bb7bce6f4780c694fcdddf51e013686f00ce"} Nov 26 11:23:42 crc kubenswrapper[4622]: I1126 11:23:42.504581 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8" event={"ID":"d29c5844-9fc3-488b-a40e-7846eea50619","Type":"ContainerStarted","Data":"0107c566aa7d6be33c14f41f7faa1e20340ea506a28be29fc7782b135ad99c86"} Nov 26 11:23:42 crc kubenswrapper[4622]: I1126 11:23:42.505164 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8" Nov 26 11:23:42 crc kubenswrapper[4622]: I1126 11:23:42.527519 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8" podStartSLOduration=2.02952109 podStartE2EDuration="5.527480461s" podCreationTimestamp="2025-11-26 11:23:37 +0000 UTC" firstStartedPulling="2025-11-26 11:23:38.20267219 +0000 UTC m=+777.793883712" lastFinishedPulling="2025-11-26 11:23:41.700631561 +0000 UTC m=+781.291843083" observedRunningTime="2025-11-26 11:23:42.524533039 +0000 UTC m=+782.115744561" watchObservedRunningTime="2025-11-26 11:23:42.527480461 +0000 UTC m=+782.118691983" Nov 26 11:23:47 crc kubenswrapper[4622]: I1126 11:23:47.818548 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.614277 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b64f4fb85-k2ggr"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.615767 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-k2ggr" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.617593 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-67ncj" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.623547 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6b7f75547b-4244k"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.624538 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-4244k" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.627167 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-md9qg" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.633731 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-955677c94-88ql9"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.634439 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-955677c94-88ql9" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.635986 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-pr9pd" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.644655 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6b7f75547b-4244k"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.647964 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz75c\" (UniqueName: \"kubernetes.io/projected/202d189d-cd09-4574-bea4-ad76a0b82cc4-kube-api-access-rz75c\") pod \"designate-operator-controller-manager-955677c94-88ql9\" (UID: \"202d189d-cd09-4574-bea4-ad76a0b82cc4\") " pod="openstack-operators/designate-operator-controller-manager-955677c94-88ql9" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.648127 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvzgx\" (UniqueName: \"kubernetes.io/projected/2d3369dc-6ada-4806-8bf7-088fe4c0b913-kube-api-access-fvzgx\") pod \"cinder-operator-controller-manager-6b7f75547b-4244k\" (UID: \"2d3369dc-6ada-4806-8bf7-088fe4c0b913\") " pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-4244k" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.648247 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qphp\" (UniqueName: \"kubernetes.io/projected/e952c5f2-5b48-4303-a533-e838c7a72c24-kube-api-access-6qphp\") pod \"barbican-operator-controller-manager-7b64f4fb85-k2ggr\" (UID: \"e952c5f2-5b48-4303-a533-e838c7a72c24\") " pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-k2ggr" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.672759 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-589cbd6b5b-lrt59"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.673769 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-lrt59" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.676075 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-szsf9" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.687706 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5b77f656f-b4qw6"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.688752 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-b4qw6" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.690164 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-mwfvv" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.694242 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b64f4fb85-k2ggr"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.698912 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-955677c94-88ql9"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.704428 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5b77f656f-b4qw6"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.712865 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.713920 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.715476 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5d494799bf-8h8x8"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.715577 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.715664 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-gst4t" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.716576 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-8h8x8" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.729616 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-kchfs" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.749578 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz75c\" (UniqueName: \"kubernetes.io/projected/202d189d-cd09-4574-bea4-ad76a0b82cc4-kube-api-access-rz75c\") pod \"designate-operator-controller-manager-955677c94-88ql9\" (UID: \"202d189d-cd09-4574-bea4-ad76a0b82cc4\") " pod="openstack-operators/designate-operator-controller-manager-955677c94-88ql9" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.749616 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt7pz\" (UniqueName: \"kubernetes.io/projected/43521a88-13da-419a-9af7-b13a9c7c11f8-kube-api-access-tt7pz\") pod \"heat-operator-controller-manager-5b77f656f-b4qw6\" (UID: \"43521a88-13da-419a-9af7-b13a9c7c11f8\") " pod="openstack-operators/heat-operator-controller-manager-5b77f656f-b4qw6" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.749657 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvzgx\" (UniqueName: \"kubernetes.io/projected/2d3369dc-6ada-4806-8bf7-088fe4c0b913-kube-api-access-fvzgx\") pod \"cinder-operator-controller-manager-6b7f75547b-4244k\" (UID: \"2d3369dc-6ada-4806-8bf7-088fe4c0b913\") " pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-4244k" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.749703 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qphp\" (UniqueName: \"kubernetes.io/projected/e952c5f2-5b48-4303-a533-e838c7a72c24-kube-api-access-6qphp\") pod \"barbican-operator-controller-manager-7b64f4fb85-k2ggr\" (UID: \"e952c5f2-5b48-4303-a533-e838c7a72c24\") " pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-k2ggr" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.755023 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.756070 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.759774 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-589cbd6b5b-lrt59"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.763880 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-zlljc" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.773427 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz75c\" (UniqueName: \"kubernetes.io/projected/202d189d-cd09-4574-bea4-ad76a0b82cc4-kube-api-access-rz75c\") pod \"designate-operator-controller-manager-955677c94-88ql9\" (UID: \"202d189d-cd09-4574-bea4-ad76a0b82cc4\") " pod="openstack-operators/designate-operator-controller-manager-955677c94-88ql9" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.784004 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvzgx\" (UniqueName: \"kubernetes.io/projected/2d3369dc-6ada-4806-8bf7-088fe4c0b913-kube-api-access-fvzgx\") pod \"cinder-operator-controller-manager-6b7f75547b-4244k\" (UID: \"2d3369dc-6ada-4806-8bf7-088fe4c0b913\") " pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-4244k" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.784115 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qphp\" (UniqueName: \"kubernetes.io/projected/e952c5f2-5b48-4303-a533-e838c7a72c24-kube-api-access-6qphp\") pod \"barbican-operator-controller-manager-7b64f4fb85-k2ggr\" (UID: \"e952c5f2-5b48-4303-a533-e838c7a72c24\") " pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-k2ggr" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.795110 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5d494799bf-8h8x8"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.804976 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.818027 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7b4567c7cf-9slpq"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.820089 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-9slpq" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.822272 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-66dtz" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.840018 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.845771 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.850553 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.851061 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-jn9nx" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.858852 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert\") pod \"infra-operator-controller-manager-57548d458d-gzvb9\" (UID: \"af470876-f83d-453f-bc58-96c91cabc509\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.859462 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5dd5\" (UniqueName: \"kubernetes.io/projected/b86c4e95-2314-4759-9b74-77c0c811fc82-kube-api-access-l5dd5\") pod \"glance-operator-controller-manager-589cbd6b5b-lrt59\" (UID: \"b86c4e95-2314-4759-9b74-77c0c811fc82\") " pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-lrt59" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.859593 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqlwr\" (UniqueName: \"kubernetes.io/projected/af470876-f83d-453f-bc58-96c91cabc509-kube-api-access-tqlwr\") pod \"infra-operator-controller-manager-57548d458d-gzvb9\" (UID: \"af470876-f83d-453f-bc58-96c91cabc509\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.859737 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnbx6\" (UniqueName: \"kubernetes.io/projected/5031223b-1c18-4996-91c6-9a8a0db7a2eb-kube-api-access-bnbx6\") pod \"horizon-operator-controller-manager-5d494799bf-8h8x8\" (UID: \"5031223b-1c18-4996-91c6-9a8a0db7a2eb\") " pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-8h8x8" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.859841 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tt7pz\" (UniqueName: \"kubernetes.io/projected/43521a88-13da-419a-9af7-b13a9c7c11f8-kube-api-access-tt7pz\") pod \"heat-operator-controller-manager-5b77f656f-b4qw6\" (UID: \"43521a88-13da-419a-9af7-b13a9c7c11f8\") " pod="openstack-operators/heat-operator-controller-manager-5b77f656f-b4qw6" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.871285 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7b4567c7cf-9slpq"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.890887 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.903114 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tt7pz\" (UniqueName: \"kubernetes.io/projected/43521a88-13da-419a-9af7-b13a9c7c11f8-kube-api-access-tt7pz\") pod \"heat-operator-controller-manager-5b77f656f-b4qw6\" (UID: 
\"43521a88-13da-419a-9af7-b13a9c7c11f8\") " pod="openstack-operators/heat-operator-controller-manager-5b77f656f-b4qw6" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.906327 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-mrcsw"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.907595 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-mrcsw" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.909113 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-r2w7d" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.918297 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-mrcsw"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.919149 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6fdcddb789-8mgd6"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.920399 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-8mgd6" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.921584 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-llq48" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.923389 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.924361 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.927048 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-lttmm" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.928192 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6fdcddb789-8mgd6"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.935114 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-k2ggr" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.937638 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.941104 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-64cdc6ff96-zc4pp"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.942262 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-zc4pp" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.944909 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-s5qsc" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.947853 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-64cdc6ff96-zc4pp"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.948032 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-4244k" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.955443 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-955677c94-88ql9" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.961091 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-56897c768d-jvn88"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.961679 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nd54s\" (UniqueName: \"kubernetes.io/projected/9a113fe4-1281-4a2f-896c-bac8b89ce952-kube-api-access-nd54s\") pod \"neutron-operator-controller-manager-6fdcddb789-8mgd6\" (UID: \"9a113fe4-1281-4a2f-896c-bac8b89ce952\") " pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-8mgd6" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.961735 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqlwr\" (UniqueName: \"kubernetes.io/projected/af470876-f83d-453f-bc58-96c91cabc509-kube-api-access-tqlwr\") pod \"infra-operator-controller-manager-57548d458d-gzvb9\" (UID: \"af470876-f83d-453f-bc58-96c91cabc509\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.961787 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ns67\" (UniqueName: \"kubernetes.io/projected/79a68295-8240-4081-8c98-f75e2a04160b-kube-api-access-9ns67\") pod \"nova-operator-controller-manager-79556f57fc-wdjgz\" (UID: \"79a68295-8240-4081-8c98-f75e2a04160b\") " pod="openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.961808 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfmw8\" (UniqueName: \"kubernetes.io/projected/e718bbdf-bc0b-44a2-8e0a-aa5fc7f9390a-kube-api-access-pfmw8\") pod \"keystone-operator-controller-manager-7b4567c7cf-9slpq\" (UID: \"e718bbdf-bc0b-44a2-8e0a-aa5fc7f9390a\") " pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-9slpq" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.961839 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnbx6\" (UniqueName: \"kubernetes.io/projected/5031223b-1c18-4996-91c6-9a8a0db7a2eb-kube-api-access-bnbx6\") pod \"horizon-operator-controller-manager-5d494799bf-8h8x8\" (UID: \"5031223b-1c18-4996-91c6-9a8a0db7a2eb\") " pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-8h8x8" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.961862 
4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz5rb\" (UniqueName: \"kubernetes.io/projected/103f81dd-36e7-412f-a756-663e7d366a3a-kube-api-access-bz5rb\") pod \"mariadb-operator-controller-manager-66f4dd4bc7-mrcsw\" (UID: \"103f81dd-36e7-412f-a756-663e7d366a3a\") " pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-mrcsw" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.961880 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz9xt\" (UniqueName: \"kubernetes.io/projected/2596bf62-d165-4a25-95b6-888c3e8f5caa-kube-api-access-zz9xt\") pod \"octavia-operator-controller-manager-64cdc6ff96-zc4pp\" (UID: \"2596bf62-d165-4a25-95b6-888c3e8f5caa\") " pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-zc4pp" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.961906 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert\") pod \"infra-operator-controller-manager-57548d458d-gzvb9\" (UID: \"af470876-f83d-453f-bc58-96c91cabc509\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.961924 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r548\" (UniqueName: \"kubernetes.io/projected/3ccaeddc-377f-4950-b942-9420c4bbeaa6-kube-api-access-9r548\") pod \"ironic-operator-controller-manager-67cb4dc6d4-2xzvm\" (UID: \"3ccaeddc-377f-4950-b942-9420c4bbeaa6\") " pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.962054 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-296vj\" (UniqueName: \"kubernetes.io/projected/ff9243ae-4f66-4399-9ab0-67d195b2319f-kube-api-access-296vj\") pod \"manila-operator-controller-manager-5d499bf58b-nrn55\" (UID: \"ff9243ae-4f66-4399-9ab0-67d195b2319f\") " pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.962480 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5dd5\" (UniqueName: \"kubernetes.io/projected/b86c4e95-2314-4759-9b74-77c0c811fc82-kube-api-access-l5dd5\") pod \"glance-operator-controller-manager-589cbd6b5b-lrt59\" (UID: \"b86c4e95-2314-4759-9b74-77c0c811fc82\") " pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-lrt59" Nov 26 11:24:24 crc kubenswrapper[4622]: E1126 11:24:24.962384 4622 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 26 11:24:24 crc kubenswrapper[4622]: E1126 11:24:24.962675 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert podName:af470876-f83d-453f-bc58-96c91cabc509 nodeName:}" failed. No retries permitted until 2025-11-26 11:24:25.462654467 +0000 UTC m=+825.053865989 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert") pod "infra-operator-controller-manager-57548d458d-gzvb9" (UID: "af470876-f83d-453f-bc58-96c91cabc509") : secret "infra-operator-webhook-server-cert" not found Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.963402 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-jvn88" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.969328 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.970338 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.970535 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-mlczr" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.975667 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-dt5t8" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.975869 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.980899 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5dd5\" (UniqueName: \"kubernetes.io/projected/b86c4e95-2314-4759-9b74-77c0c811fc82-kube-api-access-l5dd5\") pod \"glance-operator-controller-manager-589cbd6b5b-lrt59\" (UID: \"b86c4e95-2314-4759-9b74-77c0c811fc82\") " pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-lrt59" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.981001 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-56897c768d-jvn88"] Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.982334 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnbx6\" (UniqueName: \"kubernetes.io/projected/5031223b-1c18-4996-91c6-9a8a0db7a2eb-kube-api-access-bnbx6\") pod \"horizon-operator-controller-manager-5d494799bf-8h8x8\" (UID: \"5031223b-1c18-4996-91c6-9a8a0db7a2eb\") " pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-8h8x8" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.986806 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-lrt59" Nov 26 11:24:24 crc kubenswrapper[4622]: I1126 11:24:24.991709 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqlwr\" (UniqueName: \"kubernetes.io/projected/af470876-f83d-453f-bc58-96c91cabc509-kube-api-access-tqlwr\") pod \"infra-operator-controller-manager-57548d458d-gzvb9\" (UID: \"af470876-f83d-453f-bc58-96c91cabc509\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.002719 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-b4qw6" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.006566 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.007891 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.021720 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.024652 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-qx77m" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.026407 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.031244 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.032307 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.035221 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-7fbkz" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.035413 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.054378 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-8h8x8" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.054778 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.055698 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.057706 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.057911 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-rxbzd" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.063539 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ns67\" (UniqueName: \"kubernetes.io/projected/79a68295-8240-4081-8c98-f75e2a04160b-kube-api-access-9ns67\") pod \"nova-operator-controller-manager-79556f57fc-wdjgz\" (UID: \"79a68295-8240-4081-8c98-f75e2a04160b\") " pod="openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.063568 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfmw8\" (UniqueName: \"kubernetes.io/projected/e718bbdf-bc0b-44a2-8e0a-aa5fc7f9390a-kube-api-access-pfmw8\") pod \"keystone-operator-controller-manager-7b4567c7cf-9slpq\" (UID: \"e718bbdf-bc0b-44a2-8e0a-aa5fc7f9390a\") " pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-9slpq" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.063631 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz5rb\" (UniqueName: \"kubernetes.io/projected/103f81dd-36e7-412f-a756-663e7d366a3a-kube-api-access-bz5rb\") pod \"mariadb-operator-controller-manager-66f4dd4bc7-mrcsw\" (UID: \"103f81dd-36e7-412f-a756-663e7d366a3a\") " pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-mrcsw" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.063651 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz9xt\" (UniqueName: \"kubernetes.io/projected/2596bf62-d165-4a25-95b6-888c3e8f5caa-kube-api-access-zz9xt\") pod \"octavia-operator-controller-manager-64cdc6ff96-zc4pp\" (UID: \"2596bf62-d165-4a25-95b6-888c3e8f5caa\") " pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-zc4pp" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.063703 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9r548\" (UniqueName: \"kubernetes.io/projected/3ccaeddc-377f-4950-b942-9420c4bbeaa6-kube-api-access-9r548\") pod \"ironic-operator-controller-manager-67cb4dc6d4-2xzvm\" (UID: \"3ccaeddc-377f-4950-b942-9420c4bbeaa6\") " pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.063728 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-296vj\" (UniqueName: \"kubernetes.io/projected/ff9243ae-4f66-4399-9ab0-67d195b2319f-kube-api-access-296vj\") pod \"manila-operator-controller-manager-5d499bf58b-nrn55\" (UID: \"ff9243ae-4f66-4399-9ab0-67d195b2319f\") " pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.063768 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nd54s\" (UniqueName: \"kubernetes.io/projected/9a113fe4-1281-4a2f-896c-bac8b89ce952-kube-api-access-nd54s\") pod 
\"neutron-operator-controller-manager-6fdcddb789-8mgd6\" (UID: \"9a113fe4-1281-4a2f-896c-bac8b89ce952\") " pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-8mgd6" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.084737 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-296vj\" (UniqueName: \"kubernetes.io/projected/ff9243ae-4f66-4399-9ab0-67d195b2319f-kube-api-access-296vj\") pod \"manila-operator-controller-manager-5d499bf58b-nrn55\" (UID: \"ff9243ae-4f66-4399-9ab0-67d195b2319f\") " pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.086660 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ns67\" (UniqueName: \"kubernetes.io/projected/79a68295-8240-4081-8c98-f75e2a04160b-kube-api-access-9ns67\") pod \"nova-operator-controller-manager-79556f57fc-wdjgz\" (UID: \"79a68295-8240-4081-8c98-f75e2a04160b\") " pod="openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.094184 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz9xt\" (UniqueName: \"kubernetes.io/projected/2596bf62-d165-4a25-95b6-888c3e8f5caa-kube-api-access-zz9xt\") pod \"octavia-operator-controller-manager-64cdc6ff96-zc4pp\" (UID: \"2596bf62-d165-4a25-95b6-888c3e8f5caa\") " pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-zc4pp" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.095603 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz5rb\" (UniqueName: \"kubernetes.io/projected/103f81dd-36e7-412f-a756-663e7d366a3a-kube-api-access-bz5rb\") pod \"mariadb-operator-controller-manager-66f4dd4bc7-mrcsw\" (UID: \"103f81dd-36e7-412f-a756-663e7d366a3a\") " pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-mrcsw" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.098320 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nd54s\" (UniqueName: \"kubernetes.io/projected/9a113fe4-1281-4a2f-896c-bac8b89ce952-kube-api-access-nd54s\") pod \"neutron-operator-controller-manager-6fdcddb789-8mgd6\" (UID: \"9a113fe4-1281-4a2f-896c-bac8b89ce952\") " pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-8mgd6" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.113324 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r548\" (UniqueName: \"kubernetes.io/projected/3ccaeddc-377f-4950-b942-9420c4bbeaa6-kube-api-access-9r548\") pod \"ironic-operator-controller-manager-67cb4dc6d4-2xzvm\" (UID: \"3ccaeddc-377f-4950-b942-9420c4bbeaa6\") " pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.119749 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfmw8\" (UniqueName: \"kubernetes.io/projected/e718bbdf-bc0b-44a2-8e0a-aa5fc7f9390a-kube-api-access-pfmw8\") pod \"keystone-operator-controller-manager-7b4567c7cf-9slpq\" (UID: \"e718bbdf-bc0b-44a2-8e0a-aa5fc7f9390a\") " pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-9slpq" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.145603 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk"] Nov 26 
11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.147783 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.149454 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.151765 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-464fn" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.154047 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-9slpq" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.165136 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nr286\" (UniqueName: \"kubernetes.io/projected/421c5332-a5be-406b-a039-e44918747042-kube-api-access-nr286\") pod \"openstack-baremetal-operator-controller-manager-674cb676c8pzq88\" (UID: \"421c5332-a5be-406b-a039-e44918747042\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.165171 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert\") pod \"openstack-baremetal-operator-controller-manager-674cb676c8pzq88\" (UID: \"421c5332-a5be-406b-a039-e44918747042\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.165210 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqj86\" (UniqueName: \"kubernetes.io/projected/56fff709-6338-4ce4-883c-699491e7d55e-kube-api-access-cqj86\") pod \"ovn-operator-controller-manager-56897c768d-jvn88\" (UID: \"56fff709-6338-4ce4-883c-699491e7d55e\") " pod="openstack-operators/ovn-operator-controller-manager-56897c768d-jvn88" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.165230 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzj6k\" (UniqueName: \"kubernetes.io/projected/0694f996-33c7-4190-b883-0e404ce778f7-kube-api-access-bzj6k\") pod \"placement-operator-controller-manager-57988cc5b5-mn2wz\" (UID: \"0694f996-33c7-4190-b883-0e404ce778f7\") " pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.165262 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5fjb\" (UniqueName: \"kubernetes.io/projected/d30b60e7-2165-4660-a037-5a3c47f807e5-kube-api-access-x5fjb\") pod \"swift-operator-controller-manager-d77b94747-zhrsj\" (UID: \"d30b60e7-2165-4660-a037-5a3c47f807e5\") " pod="openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.165281 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpb7r\" (UniqueName: \"kubernetes.io/projected/c048fbdd-e084-4e64-9d3b-c9d8083b9019-kube-api-access-jpb7r\") pod 
\"telemetry-operator-controller-manager-76cc84c6bb-8rlt2\" (UID: \"c048fbdd-e084-4e64-9d3b-c9d8083b9019\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.171946 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.209841 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.213952 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.216123 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-fv8qk" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.222308 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.236259 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-mrcsw" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.243166 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-8mgd6" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.254679 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.257861 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-zc4pp" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.266380 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nr286\" (UniqueName: \"kubernetes.io/projected/421c5332-a5be-406b-a039-e44918747042-kube-api-access-nr286\") pod \"openstack-baremetal-operator-controller-manager-674cb676c8pzq88\" (UID: \"421c5332-a5be-406b-a039-e44918747042\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.266413 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert\") pod \"openstack-baremetal-operator-controller-manager-674cb676c8pzq88\" (UID: \"421c5332-a5be-406b-a039-e44918747042\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.266468 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqj86\" (UniqueName: \"kubernetes.io/projected/56fff709-6338-4ce4-883c-699491e7d55e-kube-api-access-cqj86\") pod \"ovn-operator-controller-manager-56897c768d-jvn88\" (UID: \"56fff709-6338-4ce4-883c-699491e7d55e\") " pod="openstack-operators/ovn-operator-controller-manager-56897c768d-jvn88" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.266624 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzj6k\" (UniqueName: \"kubernetes.io/projected/0694f996-33c7-4190-b883-0e404ce778f7-kube-api-access-bzj6k\") pod \"placement-operator-controller-manager-57988cc5b5-mn2wz\" (UID: \"0694f996-33c7-4190-b883-0e404ce778f7\") " pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.266674 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5fjb\" (UniqueName: \"kubernetes.io/projected/d30b60e7-2165-4660-a037-5a3c47f807e5-kube-api-access-x5fjb\") pod \"swift-operator-controller-manager-d77b94747-zhrsj\" (UID: \"d30b60e7-2165-4660-a037-5a3c47f807e5\") " pod="openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.266701 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt6gf\" (UniqueName: \"kubernetes.io/projected/8ef536a2-d861-4672-94e6-5058fc76eb85-kube-api-access-tt6gf\") pod \"test-operator-controller-manager-5cd6c7f4c8-9zzhk\" (UID: \"8ef536a2-d861-4672-94e6-5058fc76eb85\") " pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.266723 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpb7r\" (UniqueName: \"kubernetes.io/projected/c048fbdd-e084-4e64-9d3b-c9d8083b9019-kube-api-access-jpb7r\") pod \"telemetry-operator-controller-manager-76cc84c6bb-8rlt2\" (UID: \"c048fbdd-e084-4e64-9d3b-c9d8083b9019\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2" Nov 26 11:24:25 crc kubenswrapper[4622]: E1126 11:24:25.267268 4622 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret 
"openstack-baremetal-operator-webhook-server-cert" not found Nov 26 11:24:25 crc kubenswrapper[4622]: E1126 11:24:25.267333 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert podName:421c5332-a5be-406b-a039-e44918747042 nodeName:}" failed. No retries permitted until 2025-11-26 11:24:25.767312981 +0000 UTC m=+825.358524503 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert") pod "openstack-baremetal-operator-controller-manager-674cb676c8pzq88" (UID: "421c5332-a5be-406b-a039-e44918747042") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.290036 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzj6k\" (UniqueName: \"kubernetes.io/projected/0694f996-33c7-4190-b883-0e404ce778f7-kube-api-access-bzj6k\") pod \"placement-operator-controller-manager-57988cc5b5-mn2wz\" (UID: \"0694f996-33c7-4190-b883-0e404ce778f7\") " pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.292398 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqj86\" (UniqueName: \"kubernetes.io/projected/56fff709-6338-4ce4-883c-699491e7d55e-kube-api-access-cqj86\") pod \"ovn-operator-controller-manager-56897c768d-jvn88\" (UID: \"56fff709-6338-4ce4-883c-699491e7d55e\") " pod="openstack-operators/ovn-operator-controller-manager-56897c768d-jvn88" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.292515 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpb7r\" (UniqueName: \"kubernetes.io/projected/c048fbdd-e084-4e64-9d3b-c9d8083b9019-kube-api-access-jpb7r\") pod \"telemetry-operator-controller-manager-76cc84c6bb-8rlt2\" (UID: \"c048fbdd-e084-4e64-9d3b-c9d8083b9019\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.293209 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5fjb\" (UniqueName: \"kubernetes.io/projected/d30b60e7-2165-4660-a037-5a3c47f807e5-kube-api-access-x5fjb\") pod \"swift-operator-controller-manager-d77b94747-zhrsj\" (UID: \"d30b60e7-2165-4660-a037-5a3c47f807e5\") " pod="openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.297802 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nr286\" (UniqueName: \"kubernetes.io/projected/421c5332-a5be-406b-a039-e44918747042-kube-api-access-nr286\") pod \"openstack-baremetal-operator-controller-manager-674cb676c8pzq88\" (UID: \"421c5332-a5be-406b-a039-e44918747042\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.334401 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.335717 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.343335 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.343446 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-chjn4" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.343632 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.343781 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-jvn88" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.356873 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.360451 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-955677c94-88ql9"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.368420 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbkbb\" (UniqueName: \"kubernetes.io/projected/671c4f3d-053f-4261-bac4-537e9f4c88fa-kube-api-access-lbkbb\") pod \"watcher-operator-controller-manager-656dcb59d4-mrntw\" (UID: \"671c4f3d-053f-4261-bac4-537e9f4c88fa\") " pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.368528 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tt6gf\" (UniqueName: \"kubernetes.io/projected/8ef536a2-d861-4672-94e6-5058fc76eb85-kube-api-access-tt6gf\") pod \"test-operator-controller-manager-5cd6c7f4c8-9zzhk\" (UID: \"8ef536a2-d861-4672-94e6-5058fc76eb85\") " pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.380873 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.393914 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tt6gf\" (UniqueName: \"kubernetes.io/projected/8ef536a2-d861-4672-94e6-5058fc76eb85-kube-api-access-tt6gf\") pod \"test-operator-controller-manager-5cd6c7f4c8-9zzhk\" (UID: \"8ef536a2-d861-4672-94e6-5058fc76eb85\") " pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.396855 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.397270 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.412717 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.438290 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xsfhw"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.439352 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xsfhw" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.445307 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xsfhw"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.445463 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-r4nkl" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.469618 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert\") pod \"infra-operator-controller-manager-57548d458d-gzvb9\" (UID: \"af470876-f83d-453f-bc58-96c91cabc509\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.469694 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.469748 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbkbb\" (UniqueName: \"kubernetes.io/projected/671c4f3d-053f-4261-bac4-537e9f4c88fa-kube-api-access-lbkbb\") pod \"watcher-operator-controller-manager-656dcb59d4-mrntw\" (UID: \"671c4f3d-053f-4261-bac4-537e9f4c88fa\") " pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.469797 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9x289\" (UniqueName: \"kubernetes.io/projected/903a6422-4d68-4aab-96e8-25452ffab66d-kube-api-access-9x289\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.469935 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:25 crc kubenswrapper[4622]: E1126 11:24:25.470151 4622 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 26 11:24:25 crc kubenswrapper[4622]: E1126 11:24:25.470203 4622 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert podName:af470876-f83d-453f-bc58-96c91cabc509 nodeName:}" failed. No retries permitted until 2025-11-26 11:24:26.470185176 +0000 UTC m=+826.061396698 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert") pod "infra-operator-controller-manager-57548d458d-gzvb9" (UID: "af470876-f83d-453f-bc58-96c91cabc509") : secret "infra-operator-webhook-server-cert" not found Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.479196 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.491795 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbkbb\" (UniqueName: \"kubernetes.io/projected/671c4f3d-053f-4261-bac4-537e9f4c88fa-kube-api-access-lbkbb\") pod \"watcher-operator-controller-manager-656dcb59d4-mrntw\" (UID: \"671c4f3d-053f-4261-bac4-537e9f4c88fa\") " pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.531083 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.571249 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.571329 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69cxm\" (UniqueName: \"kubernetes.io/projected/d402353c-5e0f-4f3c-b974-c327ed33adb9-kube-api-access-69cxm\") pod \"rabbitmq-cluster-operator-manager-668c99d594-xsfhw\" (UID: \"d402353c-5e0f-4f3c-b974-c327ed33adb9\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xsfhw" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.571354 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.571406 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9x289\" (UniqueName: \"kubernetes.io/projected/903a6422-4d68-4aab-96e8-25452ffab66d-kube-api-access-9x289\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:25 crc kubenswrapper[4622]: E1126 11:24:25.572217 4622 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 26 11:24:25 crc kubenswrapper[4622]: E1126 11:24:25.572272 4622 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs podName:903a6422-4d68-4aab-96e8-25452ffab66d nodeName:}" failed. No retries permitted until 2025-11-26 11:24:26.072256061 +0000 UTC m=+825.663467583 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs") pod "openstack-operator-controller-manager-659d75f7c6-q9vn7" (UID: "903a6422-4d68-4aab-96e8-25452ffab66d") : secret "webhook-server-cert" not found Nov 26 11:24:25 crc kubenswrapper[4622]: E1126 11:24:25.572466 4622 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 26 11:24:25 crc kubenswrapper[4622]: E1126 11:24:25.572581 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs podName:903a6422-4d68-4aab-96e8-25452ffab66d nodeName:}" failed. No retries permitted until 2025-11-26 11:24:26.072556889 +0000 UTC m=+825.663768411 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs") pod "openstack-operator-controller-manager-659d75f7c6-q9vn7" (UID: "903a6422-4d68-4aab-96e8-25452ffab66d") : secret "metrics-server-cert" not found Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.589714 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9x289\" (UniqueName: \"kubernetes.io/projected/903a6422-4d68-4aab-96e8-25452ffab66d-kube-api-access-9x289\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.591497 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b64f4fb85-k2ggr"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.600734 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6b7f75547b-4244k"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.674547 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69cxm\" (UniqueName: \"kubernetes.io/projected/d402353c-5e0f-4f3c-b974-c327ed33adb9-kube-api-access-69cxm\") pod \"rabbitmq-cluster-operator-manager-668c99d594-xsfhw\" (UID: \"d402353c-5e0f-4f3c-b974-c327ed33adb9\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xsfhw" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.687852 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-589cbd6b5b-lrt59"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.698116 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69cxm\" (UniqueName: \"kubernetes.io/projected/d402353c-5e0f-4f3c-b974-c327ed33adb9-kube-api-access-69cxm\") pod \"rabbitmq-cluster-operator-manager-668c99d594-xsfhw\" (UID: \"d402353c-5e0f-4f3c-b974-c327ed33adb9\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xsfhw" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.704725 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/horizon-operator-controller-manager-5d494799bf-8h8x8"] Nov 26 11:24:25 crc kubenswrapper[4622]: W1126 11:24:25.705558 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb86c4e95_2314_4759_9b74_77c0c811fc82.slice/crio-d5d1f0f42096c4aff4277bda3d652598a74eb238e107e9e014b0a330f063a803 WatchSource:0}: Error finding container d5d1f0f42096c4aff4277bda3d652598a74eb238e107e9e014b0a330f063a803: Status 404 returned error can't find the container with id d5d1f0f42096c4aff4277bda3d652598a74eb238e107e9e014b0a330f063a803 Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.756137 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-k2ggr" event={"ID":"e952c5f2-5b48-4303-a533-e838c7a72c24","Type":"ContainerStarted","Data":"ea6a9fe4d42f386cab079f31134c73c34344ae0f72c5327a7eef9ee9c216e02d"} Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.758677 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-lrt59" event={"ID":"b86c4e95-2314-4759-9b74-77c0c811fc82","Type":"ContainerStarted","Data":"d5d1f0f42096c4aff4277bda3d652598a74eb238e107e9e014b0a330f063a803"} Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.759880 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-4244k" event={"ID":"2d3369dc-6ada-4806-8bf7-088fe4c0b913","Type":"ContainerStarted","Data":"b4134ee2d1739caebedf7baf9dc4735a5762c603418b6a0beb4cd0be60b53fc9"} Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.761066 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-955677c94-88ql9" event={"ID":"202d189d-cd09-4574-bea4-ad76a0b82cc4","Type":"ContainerStarted","Data":"d4a56acb3da74dd0a67832cbf3e54ffb565e8a18004dc334a869e5e091289112"} Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.764076 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-8h8x8" event={"ID":"5031223b-1c18-4996-91c6-9a8a0db7a2eb","Type":"ContainerStarted","Data":"3e7a255bf65d0787974518b2dab6932e7cc6c6b4eee970086a34f519f7d38cda"} Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.775426 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xsfhw" Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.777111 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert\") pod \"openstack-baremetal-operator-controller-manager-674cb676c8pzq88\" (UID: \"421c5332-a5be-406b-a039-e44918747042\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" Nov 26 11:24:25 crc kubenswrapper[4622]: E1126 11:24:25.777296 4622 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 26 11:24:25 crc kubenswrapper[4622]: E1126 11:24:25.777370 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert podName:421c5332-a5be-406b-a039-e44918747042 nodeName:}" failed. 
No retries permitted until 2025-11-26 11:24:26.777349706 +0000 UTC m=+826.368561229 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert") pod "openstack-baremetal-operator-controller-manager-674cb676c8pzq88" (UID: "421c5332-a5be-406b-a039-e44918747042") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.811719 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5b77f656f-b4qw6"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.816391 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7b4567c7cf-9slpq"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.830515 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.938626 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6fdcddb789-8mgd6"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.945933 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-64cdc6ff96-zc4pp"] Nov 26 11:24:25 crc kubenswrapper[4622]: I1126 11:24:25.966691 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-mrcsw"] Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.078167 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz"] Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.093954 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.094095 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.094357 4622 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.094411 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs podName:903a6422-4d68-4aab-96e8-25452ffab66d nodeName:}" failed. No retries permitted until 2025-11-26 11:24:27.094393197 +0000 UTC m=+826.685604709 (durationBeforeRetry 1s). 
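Note the retry spacing on these failed secret mounts: 500ms for the first failure above, 1s here, and 2s a little further down. The kubelet doubles the per-operation requeue delay on each consecutive failure of the same volume operation. A toy reproduction of that schedule follows; the doubling factor matches the durationBeforeRetry values visible in this log, while the ceiling is an assumed value for illustration only:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        delay := 500 * time.Millisecond  // first durationBeforeRetry seen above
        const maxDelay = 2 * time.Minute // assumed ceiling; not visible in this log
        for attempt := 1; attempt <= 6; attempt++ {
            fmt.Printf("attempt %d: retry in %v\n", attempt, delay)
            delay *= 2 // the doubling matches 500ms -> 1s -> 2s in the log
            if delay > maxDelay {
                delay = maxDelay
            }
        }
    }

The printed schedule reproduces the 500ms, 1s, 2s sequence recorded in the durationBeforeRetry fields above and below.
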
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs") pod "openstack-operator-controller-manager-659d75f7c6-q9vn7" (UID: "903a6422-4d68-4aab-96e8-25452ffab66d") : secret "metrics-server-cert" not found Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.094732 4622 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.094826 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs podName:903a6422-4d68-4aab-96e8-25452ffab66d nodeName:}" failed. No retries permitted until 2025-11-26 11:24:27.094806116 +0000 UTC m=+826.686017638 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs") pod "openstack-operator-controller-manager-659d75f7c6-q9vn7" (UID: "903a6422-4d68-4aab-96e8-25452ffab66d") : secret "webhook-server-cert" not found Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.096428 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-56897c768d-jvn88"] Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.132825 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj"] Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.156056 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:72236301580ff9080f7e311b832d7ba66666a9afeda51f969745229624ff26e4,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x5fjb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-d77b94747-zhrsj_openstack-operators(d30b60e7-2165-4660-a037-5a3c47f807e5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.157403 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz"] Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.159344 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x5fjb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-d77b94747-zhrsj_openstack-operators(d30b60e7-2165-4660-a037-5a3c47f807e5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.161373 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj" podUID="d30b60e7-2165-4660-a037-5a3c47f807e5" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.169795 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:225958f250a1075b69439d776a13acc45c78695c21abda23600fb53ca1640423,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bzj6k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-57988cc5b5-mn2wz_openstack-operators(0694f996-33c7-4190-b883-0e404ce778f7): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.169870 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm"] Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.176139 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bzj6k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-57988cc5b5-mn2wz_openstack-operators(0694f996-33c7-4190-b883-0e404ce778f7): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.177222 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz" podUID="0694f996-33c7-4190-b883-0e404ce778f7" Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.177353 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk"] Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.185115 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2"] Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.189248 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw"] Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.193429 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:210517b918e30df1c95fc7d961c8e57e9a9d1cc2b9fe7eb4dad2034dd53a90aa,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tt6gf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5cd6c7f4c8-9zzhk_openstack-operators(8ef536a2-d861-4672-94e6-5058fc76eb85): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.193643 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:d65dbfc956e9cf376f3c48fc3a0942cb7306b5164f898c40d1efca106df81db7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9r548,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-67cb4dc6d4-2xzvm_openstack-operators(3ccaeddc-377f-4950-b942-9420c4bbeaa6): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.195564 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tt6gf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5cd6c7f4c8-9zzhk_openstack-operators(8ef536a2-d861-4672-94e6-5058fc76eb85): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 26 11:24:26 crc kubenswrapper[4622]: W1126 11:24:26.196011 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod671c4f3d_053f_4261_bac4_537e9f4c88fa.slice/crio-4d3a7afcdeed762f0bc4077bd019b5147f92a26d895b477a41b4bec19a43b0d8 WatchSource:0}: Error finding container 4d3a7afcdeed762f0bc4077bd019b5147f92a26d895b477a41b4bec19a43b0d8: Status 404 returned error can't find the container with id 4d3a7afcdeed762f0bc4077bd019b5147f92a26d895b477a41b4bec19a43b0d8 Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.196172 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ 
--logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9r548,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-67cb4dc6d4-2xzvm_openstack-operators(3ccaeddc-377f-4950-b942-9420c4bbeaa6): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.196688 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk" podUID="8ef536a2-d861-4672-94e6-5058fc76eb85" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.197467 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm" podUID="3ccaeddc-377f-4950-b942-9420c4bbeaa6" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.199588 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jpb7r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-8rlt2_openstack-operators(c048fbdd-e084-4e64-9d3b-c9d8083b9019): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.199757 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:6bed55b172b9ee8ccc3952cbfc543d8bd44e2690f6db94348a754152fd78f4cf,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lbkbb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-656dcb59d4-mrntw_openstack-operators(671c4f3d-053f-4261-bac4-537e9f4c88fa): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.201956 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jpb7r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-8rlt2_openstack-operators(c048fbdd-e084-4e64-9d3b-c9d8083b9019): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.202466 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lbkbb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-656dcb59d4-mrntw_openstack-operators(671c4f3d-053f-4261-bac4-537e9f4c88fa): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.203268 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2" podUID="c048fbdd-e084-4e64-9d3b-c9d8083b9019" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.204421 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw" podUID="671c4f3d-053f-4261-bac4-537e9f4c88fa" Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.321335 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xsfhw"] Nov 26 11:24:26 crc kubenswrapper[4622]: W1126 11:24:26.326761 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd402353c_5e0f_4f3c_b974_c327ed33adb9.slice/crio-ea133c8a258a62bc6c3d7b067f9370013a444bcda471d45cbdbf30c67340e230 WatchSource:0}: Error finding container ea133c8a258a62bc6c3d7b067f9370013a444bcda471d45cbdbf30c67340e230: Status 404 returned error can't find the container with id ea133c8a258a62bc6c3d7b067f9370013a444bcda471d45cbdbf30c67340e230 Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.500698 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert\") pod \"infra-operator-controller-manager-57548d458d-gzvb9\" (UID: \"af470876-f83d-453f-bc58-96c91cabc509\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.500843 4622 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.500928 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert podName:af470876-f83d-453f-bc58-96c91cabc509 nodeName:}" failed. 
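The wall of "ErrImagePull: pull QPS exceeded" errors above is the kubelet's own image-pull rate limit, not a registry outage: with this many operator pods created in the same second, pulls beyond the configured budget fail immediately and are left for a later sync. The budget comes from registryPullQPS and registryBurst in the KubeletConfiguration; the 5 pulls/s with a burst of 10 used below are their documented defaults, assumed to be in effect on this node. A token-bucket sketch of the behavior:

    package main

    import (
        "fmt"

        "golang.org/x/time/rate"
    )

    func main() {
        // registryPullQPS=5, registryBurst=10: the documented kubelet
        // defaults, assumed to be what this node is running with.
        limiter := rate.NewLimiter(5, 10)
        for pull := 1; pull <= 14; pull++ {
            if limiter.Allow() {
                fmt.Printf("pull %2d: allowed\n", pull)
            } else {
                // What the kubelet surfaces as ErrImagePull: pull QPS exceeded.
                fmt.Printf("pull %2d: pull QPS exceeded\n", pull)
            }
        }
    }

Run in a tight loop, the first ten calls pass on burst credit and the rest are rejected, mirroring the pattern above where a few operator images begin pulling while the others fail within the same second.
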
No retries permitted until 2025-11-26 11:24:28.500908932 +0000 UTC m=+828.092120455 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert") pod "infra-operator-controller-manager-57548d458d-gzvb9" (UID: "af470876-f83d-453f-bc58-96c91cabc509") : secret "infra-operator-webhook-server-cert" not found Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.782417 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-mrcsw" event={"ID":"103f81dd-36e7-412f-a756-663e7d366a3a","Type":"ContainerStarted","Data":"69dd80b78c2235d6e1fa2e90d474fca05a79e7f6e5fa472fd680d557dda7c385"} Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.784223 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz" event={"ID":"79a68295-8240-4081-8c98-f75e2a04160b","Type":"ContainerStarted","Data":"471cd0b2b787bc785915d13d4423be307194784e9b668c72e262f9920a59da1f"} Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.792654 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55" event={"ID":"ff9243ae-4f66-4399-9ab0-67d195b2319f","Type":"ContainerStarted","Data":"72e5285a778f4dd49a490dc68142156942ccc4eaf3a039d6458d750437ef1255"} Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.796862 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-b4qw6" event={"ID":"43521a88-13da-419a-9af7-b13a9c7c11f8","Type":"ContainerStarted","Data":"e6bbeec26f362c56005826a973047def81ebc32b316152a51e38d04b41b2f66a"} Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.798755 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk" event={"ID":"8ef536a2-d861-4672-94e6-5058fc76eb85","Type":"ContainerStarted","Data":"55b832b9684f15b6a917dd07ba849454684590588049791c18a2d6ab30f911cb"} Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.800839 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xsfhw" event={"ID":"d402353c-5e0f-4f3c-b974-c327ed33adb9","Type":"ContainerStarted","Data":"ea133c8a258a62bc6c3d7b067f9370013a444bcda471d45cbdbf30c67340e230"} Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.801865 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:210517b918e30df1c95fc7d961c8e57e9a9d1cc2b9fe7eb4dad2034dd53a90aa\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk" podUID="8ef536a2-d861-4672-94e6-5058fc76eb85" Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.803300 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2" event={"ID":"c048fbdd-e084-4e64-9d3b-c9d8083b9019","Type":"ContainerStarted","Data":"947f2206a2b6141a22b7be4486e5acf6eae3a419f9a2580faf677503167b10eb"} Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.806454 4622 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-9slpq" event={"ID":"e718bbdf-bc0b-44a2-8e0a-aa5fc7f9390a","Type":"ContainerStarted","Data":"29558f926540582ebe97e2f1190bfa3ab0c21eee312f75de33ddcb42cf233f71"} Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.807566 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2" podUID="c048fbdd-e084-4e64-9d3b-c9d8083b9019" Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.807934 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert\") pod \"openstack-baremetal-operator-controller-manager-674cb676c8pzq88\" (UID: \"421c5332-a5be-406b-a039-e44918747042\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.808136 4622 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.808189 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert podName:421c5332-a5be-406b-a039-e44918747042 nodeName:}" failed. No retries permitted until 2025-11-26 11:24:28.808168463 +0000 UTC m=+828.399379984 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert") pod "openstack-baremetal-operator-controller-manager-674cb676c8pzq88" (UID: "421c5332-a5be-406b-a039-e44918747042") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.812840 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm" event={"ID":"3ccaeddc-377f-4950-b942-9420c4bbeaa6","Type":"ContainerStarted","Data":"4ceedde952898667c27e2fde068e1accd8aadbf03c6096b483b4dc82f6b880a7"} Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.815250 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:d65dbfc956e9cf376f3c48fc3a0942cb7306b5164f898c40d1efca106df81db7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm" podUID="3ccaeddc-377f-4950-b942-9420c4bbeaa6" Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.816325 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz" event={"ID":"0694f996-33c7-4190-b883-0e404ce778f7","Type":"ContainerStarted","Data":"d78808bdd58cf61900c425a4dcebed26f637a1479230716f4775f6de2df51bd6"} Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.821545 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw" event={"ID":"671c4f3d-053f-4261-bac4-537e9f4c88fa","Type":"ContainerStarted","Data":"4d3a7afcdeed762f0bc4077bd019b5147f92a26d895b477a41b4bec19a43b0d8"} Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.823472 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:225958f250a1075b69439d776a13acc45c78695c21abda23600fb53ca1640423\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz" podUID="0694f996-33c7-4190-b883-0e404ce778f7" Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.825537 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:6bed55b172b9ee8ccc3952cbfc543d8bd44e2690f6db94348a754152fd78f4cf\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw" podUID="671c4f3d-053f-4261-bac4-537e9f4c88fa" Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.836461 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-jvn88" 
event={"ID":"56fff709-6338-4ce4-883c-699491e7d55e","Type":"ContainerStarted","Data":"7e31c44aac706bf62861d6c7a0356c58183c4a3d193305efca9c0560a479e589"} Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.837487 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj" event={"ID":"d30b60e7-2165-4660-a037-5a3c47f807e5","Type":"ContainerStarted","Data":"fd4013718b4c65581e9b934800b9ba3aaedecf72fc041d3f19b51aac97c73bc2"} Nov 26 11:24:26 crc kubenswrapper[4622]: E1126 11:24:26.839753 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:72236301580ff9080f7e311b832d7ba66666a9afeda51f969745229624ff26e4\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj" podUID="d30b60e7-2165-4660-a037-5a3c47f807e5" Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.841379 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-8mgd6" event={"ID":"9a113fe4-1281-4a2f-896c-bac8b89ce952","Type":"ContainerStarted","Data":"243b075be0a2104474a36db38a9c2eb343b7ae560d0b0abcec572c7e9e3ee0c4"} Nov 26 11:24:26 crc kubenswrapper[4622]: I1126 11:24:26.847871 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-zc4pp" event={"ID":"2596bf62-d165-4a25-95b6-888c3e8f5caa","Type":"ContainerStarted","Data":"26e53f74d8efd269cc620c07595111783be43a475de3e1d0af6a7fda42a72dbc"} Nov 26 11:24:27 crc kubenswrapper[4622]: I1126 11:24:27.113448 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:27 crc kubenswrapper[4622]: I1126 11:24:27.113577 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:27 crc kubenswrapper[4622]: E1126 11:24:27.113702 4622 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 26 11:24:27 crc kubenswrapper[4622]: E1126 11:24:27.113805 4622 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 26 11:24:27 crc kubenswrapper[4622]: E1126 11:24:27.113817 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs podName:903a6422-4d68-4aab-96e8-25452ffab66d nodeName:}" failed. No retries permitted until 2025-11-26 11:24:29.113789462 +0000 UTC m=+828.705000984 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs") pod "openstack-operator-controller-manager-659d75f7c6-q9vn7" (UID: "903a6422-4d68-4aab-96e8-25452ffab66d") : secret "webhook-server-cert" not found Nov 26 11:24:27 crc kubenswrapper[4622]: E1126 11:24:27.113958 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs podName:903a6422-4d68-4aab-96e8-25452ffab66d nodeName:}" failed. No retries permitted until 2025-11-26 11:24:29.11391531 +0000 UTC m=+828.705126831 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs") pod "openstack-operator-controller-manager-659d75f7c6-q9vn7" (UID: "903a6422-4d68-4aab-96e8-25452ffab66d") : secret "metrics-server-cert" not found Nov 26 11:24:27 crc kubenswrapper[4622]: E1126 11:24:27.863699 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:d65dbfc956e9cf376f3c48fc3a0942cb7306b5164f898c40d1efca106df81db7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm" podUID="3ccaeddc-377f-4950-b942-9420c4bbeaa6" Nov 26 11:24:27 crc kubenswrapper[4622]: E1126 11:24:27.864061 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:225958f250a1075b69439d776a13acc45c78695c21abda23600fb53ca1640423\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz" podUID="0694f996-33c7-4190-b883-0e404ce778f7" Nov 26 11:24:27 crc kubenswrapper[4622]: E1126 11:24:27.864429 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:6bed55b172b9ee8ccc3952cbfc543d8bd44e2690f6db94348a754152fd78f4cf\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw" podUID="671c4f3d-053f-4261-bac4-537e9f4c88fa" Nov 26 11:24:27 crc kubenswrapper[4622]: E1126 11:24:27.864438 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:72236301580ff9080f7e311b832d7ba66666a9afeda51f969745229624ff26e4\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj" podUID="d30b60e7-2165-4660-a037-5a3c47f807e5" Nov 26 11:24:27 
crc kubenswrapper[4622]: E1126 11:24:27.864472 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:210517b918e30df1c95fc7d961c8e57e9a9d1cc2b9fe7eb4dad2034dd53a90aa\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk" podUID="8ef536a2-d861-4672-94e6-5058fc76eb85" Nov 26 11:24:27 crc kubenswrapper[4622]: E1126 11:24:27.865779 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2" podUID="c048fbdd-e084-4e64-9d3b-c9d8083b9019" Nov 26 11:24:28 crc kubenswrapper[4622]: I1126 11:24:28.532149 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert\") pod \"infra-operator-controller-manager-57548d458d-gzvb9\" (UID: \"af470876-f83d-453f-bc58-96c91cabc509\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:24:28 crc kubenswrapper[4622]: E1126 11:24:28.532367 4622 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 26 11:24:28 crc kubenswrapper[4622]: E1126 11:24:28.532465 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert podName:af470876-f83d-453f-bc58-96c91cabc509 nodeName:}" failed. No retries permitted until 2025-11-26 11:24:32.532444902 +0000 UTC m=+832.123656424 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert") pod "infra-operator-controller-manager-57548d458d-gzvb9" (UID: "af470876-f83d-453f-bc58-96c91cabc509") : secret "infra-operator-webhook-server-cert" not found Nov 26 11:24:28 crc kubenswrapper[4622]: I1126 11:24:28.837260 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert\") pod \"openstack-baremetal-operator-controller-manager-674cb676c8pzq88\" (UID: \"421c5332-a5be-406b-a039-e44918747042\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" Nov 26 11:24:28 crc kubenswrapper[4622]: E1126 11:24:28.838073 4622 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 26 11:24:28 crc kubenswrapper[4622]: E1126 11:24:28.838141 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert podName:421c5332-a5be-406b-a039-e44918747042 nodeName:}" failed. 
No retries permitted until 2025-11-26 11:24:32.838122087 +0000 UTC m=+832.429333609 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert") pod "openstack-baremetal-operator-controller-manager-674cb676c8pzq88" (UID: "421c5332-a5be-406b-a039-e44918747042") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 26 11:24:29 crc kubenswrapper[4622]: I1126 11:24:29.141118 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:29 crc kubenswrapper[4622]: E1126 11:24:29.141296 4622 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 26 11:24:29 crc kubenswrapper[4622]: E1126 11:24:29.141589 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs podName:903a6422-4d68-4aab-96e8-25452ffab66d nodeName:}" failed. No retries permitted until 2025-11-26 11:24:33.14156636 +0000 UTC m=+832.732777881 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs") pod "openstack-operator-controller-manager-659d75f7c6-q9vn7" (UID: "903a6422-4d68-4aab-96e8-25452ffab66d") : secret "metrics-server-cert" not found Nov 26 11:24:29 crc kubenswrapper[4622]: I1126 11:24:29.141660 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:29 crc kubenswrapper[4622]: E1126 11:24:29.141892 4622 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 26 11:24:29 crc kubenswrapper[4622]: E1126 11:24:29.142044 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs podName:903a6422-4d68-4aab-96e8-25452ffab66d nodeName:}" failed. No retries permitted until 2025-11-26 11:24:33.142019454 +0000 UTC m=+832.733230976 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs") pod "openstack-operator-controller-manager-659d75f7c6-q9vn7" (UID: "903a6422-4d68-4aab-96e8-25452ffab66d") : secret "webhook-server-cert" not found Nov 26 11:24:32 crc kubenswrapper[4622]: I1126 11:24:32.602898 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert\") pod \"infra-operator-controller-manager-57548d458d-gzvb9\" (UID: \"af470876-f83d-453f-bc58-96c91cabc509\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:24:32 crc kubenswrapper[4622]: E1126 11:24:32.603116 4622 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 26 11:24:32 crc kubenswrapper[4622]: E1126 11:24:32.603392 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert podName:af470876-f83d-453f-bc58-96c91cabc509 nodeName:}" failed. No retries permitted until 2025-11-26 11:24:40.603371029 +0000 UTC m=+840.194582551 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert") pod "infra-operator-controller-manager-57548d458d-gzvb9" (UID: "af470876-f83d-453f-bc58-96c91cabc509") : secret "infra-operator-webhook-server-cert" not found Nov 26 11:24:32 crc kubenswrapper[4622]: I1126 11:24:32.909576 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert\") pod \"openstack-baremetal-operator-controller-manager-674cb676c8pzq88\" (UID: \"421c5332-a5be-406b-a039-e44918747042\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" Nov 26 11:24:32 crc kubenswrapper[4622]: E1126 11:24:32.909747 4622 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 26 11:24:32 crc kubenswrapper[4622]: E1126 11:24:32.909814 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert podName:421c5332-a5be-406b-a039-e44918747042 nodeName:}" failed. No retries permitted until 2025-11-26 11:24:40.909798861 +0000 UTC m=+840.501010383 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert") pod "openstack-baremetal-operator-controller-manager-674cb676c8pzq88" (UID: "421c5332-a5be-406b-a039-e44918747042") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 26 11:24:33 crc kubenswrapper[4622]: I1126 11:24:33.218358 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:33 crc kubenswrapper[4622]: I1126 11:24:33.218541 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:33 crc kubenswrapper[4622]: E1126 11:24:33.218611 4622 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 26 11:24:33 crc kubenswrapper[4622]: E1126 11:24:33.218708 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs podName:903a6422-4d68-4aab-96e8-25452ffab66d nodeName:}" failed. No retries permitted until 2025-11-26 11:24:41.218687074 +0000 UTC m=+840.809898597 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs") pod "openstack-operator-controller-manager-659d75f7c6-q9vn7" (UID: "903a6422-4d68-4aab-96e8-25452ffab66d") : secret "webhook-server-cert" not found Nov 26 11:24:33 crc kubenswrapper[4622]: E1126 11:24:33.218751 4622 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 26 11:24:33 crc kubenswrapper[4622]: E1126 11:24:33.218830 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs podName:903a6422-4d68-4aab-96e8-25452ffab66d nodeName:}" failed. No retries permitted until 2025-11-26 11:24:41.218812191 +0000 UTC m=+840.810023713 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs") pod "openstack-operator-controller-manager-659d75f7c6-q9vn7" (UID: "903a6422-4d68-4aab-96e8-25452ffab66d") : secret "metrics-server-cert" not found Nov 26 11:24:35 crc kubenswrapper[4622]: E1126 11:24:35.564754 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rz75c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-955677c94-88ql9_openstack-operators(202d189d-cd09-4574-bea4-ad76a0b82cc4): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 26 11:24:35 crc kubenswrapper[4622]: E1126 11:24:35.566711 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/designate-operator-controller-manager-955677c94-88ql9" podUID="202d189d-cd09-4574-bea4-ad76a0b82cc4" Nov 26 11:24:35 crc kubenswrapper[4622]: E1126 11:24:35.568141 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-296vj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-5d499bf58b-nrn55_openstack-operators(ff9243ae-4f66-4399-9ab0-67d195b2319f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 26 11:24:35 crc kubenswrapper[4622]: E1126 11:24:35.569800 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55" podUID="ff9243ae-4f66-4399-9ab0-67d195b2319f" Nov 26 11:24:35 crc kubenswrapper[4622]: E1126 11:24:35.573140 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9ns67,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-79556f57fc-wdjgz_openstack-operators(79a68295-8240-4081-8c98-f75e2a04160b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 26 11:24:35 crc kubenswrapper[4622]: E1126 11:24:35.574627 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz" podUID="79a68295-8240-4081-8c98-f75e2a04160b" Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.923082 4622 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-mrcsw" event={"ID":"103f81dd-36e7-412f-a756-663e7d366a3a","Type":"ContainerStarted","Data":"3eeb4a73cdb922bcc1b89281b244d55acfe5cf14731e8071f7242495b7533aa1"} Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.928527 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-jvn88" event={"ID":"56fff709-6338-4ce4-883c-699491e7d55e","Type":"ContainerStarted","Data":"ac8da5d1fd08f46533c07e3c73917e8d30da3eb4e05e16075449c8988e06d47a"} Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.929862 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-4244k" event={"ID":"2d3369dc-6ada-4806-8bf7-088fe4c0b913","Type":"ContainerStarted","Data":"13eedeffa60dad99129511d1cabacbe109d49d477f8f7b6bf97f3b1e9855c440"} Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.931151 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-9slpq" event={"ID":"e718bbdf-bc0b-44a2-8e0a-aa5fc7f9390a","Type":"ContainerStarted","Data":"af617c088057c25db6524454a2da0614595e2278108ab77fa9cdb5eac87bdcf1"} Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.932827 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-955677c94-88ql9" event={"ID":"202d189d-cd09-4574-bea4-ad76a0b82cc4","Type":"ContainerStarted","Data":"e58c37e7a3cc7e1153a6714a4ec7de1499fe00938323d628bbed8b1128715818"} Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.932967 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-955677c94-88ql9" Nov 26 11:24:35 crc kubenswrapper[4622]: E1126 11:24:35.934458 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/designate-operator-controller-manager-955677c94-88ql9" podUID="202d189d-cd09-4574-bea4-ad76a0b82cc4" Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.934882 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-zc4pp" event={"ID":"2596bf62-d165-4a25-95b6-888c3e8f5caa","Type":"ContainerStarted","Data":"b2fd8aa7235006373cf8afac77778b0290fe0093065f862a7fc8fafa38f52069"} Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.936485 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xsfhw" event={"ID":"d402353c-5e0f-4f3c-b974-c327ed33adb9","Type":"ContainerStarted","Data":"285cfa08e434464845d0db843002a9b3ec95ceeb457f463acbfd05409f40d2e5"} Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.937958 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-8mgd6" event={"ID":"9a113fe4-1281-4a2f-896c-bac8b89ce952","Type":"ContainerStarted","Data":"90d43c04619401f0c80c345c85fdc8bf7614d656b3d90d9a02bd1af621a18e96"} Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.939604 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-8h8x8" 
event={"ID":"5031223b-1c18-4996-91c6-9a8a0db7a2eb","Type":"ContainerStarted","Data":"547f6868e28ee8daf0ca6f543cc3722100b71d635a2314000d3fa2e4f0f371d2"} Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.943608 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz" event={"ID":"79a68295-8240-4081-8c98-f75e2a04160b","Type":"ContainerStarted","Data":"d2449729f9f2f42ac36944c768549afbe2edf051bda5aff1de794ddc51a560a9"} Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.943734 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz" Nov 26 11:24:35 crc kubenswrapper[4622]: E1126 11:24:35.944885 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz" podUID="79a68295-8240-4081-8c98-f75e2a04160b" Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.945216 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55" event={"ID":"ff9243ae-4f66-4399-9ab0-67d195b2319f","Type":"ContainerStarted","Data":"459ceef561ee59356424ae6f346a341d9d351006204792382df9cd3ab7db11e9"} Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.945266 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55" Nov 26 11:24:35 crc kubenswrapper[4622]: E1126 11:24:35.946380 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55" podUID="ff9243ae-4f66-4399-9ab0-67d195b2319f" Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.946828 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-b4qw6" event={"ID":"43521a88-13da-419a-9af7-b13a9c7c11f8","Type":"ContainerStarted","Data":"31af3ead2e4f7a78e8293eab213e285197a0cfeb4e37db0c7df054430cf15f16"} Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.948300 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-k2ggr" event={"ID":"e952c5f2-5b48-4303-a533-e838c7a72c24","Type":"ContainerStarted","Data":"82a9c76d571cb7fa0ce7f6d3ca0f0e5866e78b375e182e9aaa4fd908494f320a"} Nov 26 11:24:35 crc kubenswrapper[4622]: I1126 11:24:35.951555 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-lrt59" event={"ID":"b86c4e95-2314-4759-9b74-77c0c811fc82","Type":"ContainerStarted","Data":"7e630633287783726885abf39cc62172c717071ef93d7f2a8048ea6952843586"} Nov 26 11:24:36 crc kubenswrapper[4622]: I1126 11:24:36.006783 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xsfhw" podStartSLOduration=2.130821987 podStartE2EDuration="11.006763414s" podCreationTimestamp="2025-11-26 11:24:25 +0000 UTC" firstStartedPulling="2025-11-26 11:24:26.330227611 +0000 UTC m=+825.921439133" 
lastFinishedPulling="2025-11-26 11:24:35.206169038 +0000 UTC m=+834.797380560" observedRunningTime="2025-11-26 11:24:35.999437401 +0000 UTC m=+835.590648924" watchObservedRunningTime="2025-11-26 11:24:36.006763414 +0000 UTC m=+835.597974936" Nov 26 11:24:36 crc kubenswrapper[4622]: E1126 11:24:36.961131 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz" podUID="79a68295-8240-4081-8c98-f75e2a04160b" Nov 26 11:24:36 crc kubenswrapper[4622]: E1126 11:24:36.961430 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/designate-operator-controller-manager-955677c94-88ql9" podUID="202d189d-cd09-4574-bea4-ad76a0b82cc4" Nov 26 11:24:36 crc kubenswrapper[4622]: E1126 11:24:36.961472 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55" podUID="ff9243ae-4f66-4399-9ab0-67d195b2319f" Nov 26 11:24:39 crc kubenswrapper[4622]: I1126 11:24:39.985538 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-zc4pp" event={"ID":"2596bf62-d165-4a25-95b6-888c3e8f5caa","Type":"ContainerStarted","Data":"13c9e5ad779b0a1717b968bd9a46412e3939a76e0514be906c74d1b1fe70df73"} Nov 26 11:24:39 crc kubenswrapper[4622]: I1126 11:24:39.986367 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-zc4pp" Nov 26 11:24:39 crc kubenswrapper[4622]: I1126 11:24:39.988772 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-8mgd6" event={"ID":"9a113fe4-1281-4a2f-896c-bac8b89ce952","Type":"ContainerStarted","Data":"bc17c3b1bd238e00655a498342f2ef9ca50069cbdf1a46afbe22801679a02856"} Nov 26 11:24:39 crc kubenswrapper[4622]: I1126 11:24:39.989040 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-8mgd6" Nov 26 11:24:39 crc kubenswrapper[4622]: I1126 11:24:39.990028 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-4244k" event={"ID":"2d3369dc-6ada-4806-8bf7-088fe4c0b913","Type":"ContainerStarted","Data":"38225da23055d9486775ae6cb58b5bc7ea9690596a6f841025ba656b72d9efd4"} Nov 26 11:24:39 crc kubenswrapper[4622]: I1126 11:24:39.990179 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-4244k" Nov 26 11:24:39 crc kubenswrapper[4622]: I1126 11:24:39.993015 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-8h8x8" event={"ID":"5031223b-1c18-4996-91c6-9a8a0db7a2eb","Type":"ContainerStarted","Data":"0b7ba2d2252158d744a5c0d172728ae79451d1e3755b26ccd9672a500643a840"} Nov 26 11:24:39 crc 
kubenswrapper[4622]: I1126 11:24:39.993151 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-8h8x8" Nov 26 11:24:39 crc kubenswrapper[4622]: I1126 11:24:39.994413 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-jvn88" event={"ID":"56fff709-6338-4ce4-883c-699491e7d55e","Type":"ContainerStarted","Data":"bc0b34a457dbf2ccbb8caf667ce5f69594930a461511eeea6e4f12f198e67975"} Nov 26 11:24:39 crc kubenswrapper[4622]: I1126 11:24:39.994899 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-jvn88" Nov 26 11:24:39 crc kubenswrapper[4622]: I1126 11:24:39.996400 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-k2ggr" event={"ID":"e952c5f2-5b48-4303-a533-e838c7a72c24","Type":"ContainerStarted","Data":"3131f6c5ad49b7991a0d53c989232fc155c9e6d22f7d1587ec8d313cf6744af6"} Nov 26 11:24:39 crc kubenswrapper[4622]: I1126 11:24:39.996543 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-k2ggr" Nov 26 11:24:39 crc kubenswrapper[4622]: I1126 11:24:39.997915 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-mrcsw" event={"ID":"103f81dd-36e7-412f-a756-663e7d366a3a","Type":"ContainerStarted","Data":"5d9bbb3cf9b4e0d904b631f9ce8306024cf0d44342d25af7ca80982a53605255"} Nov 26 11:24:39 crc kubenswrapper[4622]: I1126 11:24:39.997958 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-mrcsw" Nov 26 11:24:39 crc kubenswrapper[4622]: I1126 11:24:39.999293 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-9slpq" event={"ID":"e718bbdf-bc0b-44a2-8e0a-aa5fc7f9390a","Type":"ContainerStarted","Data":"f6e9761e7b0958637f87e9f20994d17a00cd4b8fd88f713dc2721e270267e067"} Nov 26 11:24:39 crc kubenswrapper[4622]: I1126 11:24:39.999375 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-9slpq" Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.000835 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-lrt59" event={"ID":"b86c4e95-2314-4759-9b74-77c0c811fc82","Type":"ContainerStarted","Data":"a5c48baeed58cfb95045528767c59c70daf5920c726e33d5dc475ee9eab1d232"} Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.001165 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-lrt59" Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.002168 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-b4qw6" event={"ID":"43521a88-13da-419a-9af7-b13a9c7c11f8","Type":"ContainerStarted","Data":"d2ffe4d42cddd3d5053e28206b220e5bbc4db5279a783c03f8641f962d0a1be3"} Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.002694 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-b4qw6" Nov 26 11:24:40 
crc kubenswrapper[4622]: I1126 11:24:40.002932 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-zc4pp" podStartSLOduration=3.070549793 podStartE2EDuration="16.002920246s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:25.988549924 +0000 UTC m=+825.579761446" lastFinishedPulling="2025-11-26 11:24:38.920920387 +0000 UTC m=+838.512131899" observedRunningTime="2025-11-26 11:24:40.001531345 +0000 UTC m=+839.592742868" watchObservedRunningTime="2025-11-26 11:24:40.002920246 +0000 UTC m=+839.594131768" Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.003346 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-lrt59" Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.004322 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-b4qw6" Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.018072 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-mrcsw" podStartSLOduration=3.189743844 podStartE2EDuration="16.018058998s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:25.988352482 +0000 UTC m=+825.579564004" lastFinishedPulling="2025-11-26 11:24:38.816667636 +0000 UTC m=+838.407879158" observedRunningTime="2025-11-26 11:24:40.015809474 +0000 UTC m=+839.607020996" watchObservedRunningTime="2025-11-26 11:24:40.018058998 +0000 UTC m=+839.609270520" Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.026641 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-8mgd6" podStartSLOduration=3.192157088 podStartE2EDuration="16.026620501s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:25.982729363 +0000 UTC m=+825.573940885" lastFinishedPulling="2025-11-26 11:24:38.817192777 +0000 UTC m=+838.408404298" observedRunningTime="2025-11-26 11:24:40.025400739 +0000 UTC m=+839.616612262" watchObservedRunningTime="2025-11-26 11:24:40.026620501 +0000 UTC m=+839.617832022" Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.042071 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-8h8x8" podStartSLOduration=2.910536809 podStartE2EDuration="16.042059048s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:25.719041257 +0000 UTC m=+825.310252790" lastFinishedPulling="2025-11-26 11:24:38.850563507 +0000 UTC m=+838.441775029" observedRunningTime="2025-11-26 11:24:40.037934858 +0000 UTC m=+839.629146380" watchObservedRunningTime="2025-11-26 11:24:40.042059048 +0000 UTC m=+839.633270570" Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.051195 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-k2ggr" podStartSLOduration=2.8424456830000002 podStartE2EDuration="16.051178234s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:25.606804273 +0000 UTC m=+825.198015794" lastFinishedPulling="2025-11-26 11:24:38.815536823 +0000 UTC m=+838.406748345" 
observedRunningTime="2025-11-26 11:24:40.050163498 +0000 UTC m=+839.641375021" watchObservedRunningTime="2025-11-26 11:24:40.051178234 +0000 UTC m=+839.642389755" Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.064333 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-lrt59" podStartSLOduration=2.9611442500000003 podStartE2EDuration="16.064314137s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:25.711897831 +0000 UTC m=+825.303109352" lastFinishedPulling="2025-11-26 11:24:38.815067717 +0000 UTC m=+838.406279239" observedRunningTime="2025-11-26 11:24:40.062845806 +0000 UTC m=+839.654057328" watchObservedRunningTime="2025-11-26 11:24:40.064314137 +0000 UTC m=+839.655525659" Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.075815 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-9slpq" podStartSLOduration=3.058716667 podStartE2EDuration="16.075803804s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:25.834130525 +0000 UTC m=+825.425342046" lastFinishedPulling="2025-11-26 11:24:38.851217661 +0000 UTC m=+838.442429183" observedRunningTime="2025-11-26 11:24:40.072055562 +0000 UTC m=+839.663267084" watchObservedRunningTime="2025-11-26 11:24:40.075803804 +0000 UTC m=+839.667015326" Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.087634 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-4244k" podStartSLOduration=2.836924016 podStartE2EDuration="16.087616601s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:25.600579629 +0000 UTC m=+825.191791151" lastFinishedPulling="2025-11-26 11:24:38.851272213 +0000 UTC m=+838.442483736" observedRunningTime="2025-11-26 11:24:40.082730092 +0000 UTC m=+839.673941614" watchObservedRunningTime="2025-11-26 11:24:40.087616601 +0000 UTC m=+839.678828123" Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.095049 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-jvn88" podStartSLOduration=3.424133492 podStartE2EDuration="16.095034887s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:26.141271902 +0000 UTC m=+825.732483424" lastFinishedPulling="2025-11-26 11:24:38.812173296 +0000 UTC m=+838.403384819" observedRunningTime="2025-11-26 11:24:40.09233276 +0000 UTC m=+839.683544281" watchObservedRunningTime="2025-11-26 11:24:40.095034887 +0000 UTC m=+839.686246409" Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.115538 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-b4qw6" podStartSLOduration=3.059533759 podStartE2EDuration="16.115520307s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:25.850361647 +0000 UTC m=+825.441573169" lastFinishedPulling="2025-11-26 11:24:38.906348204 +0000 UTC m=+838.497559717" observedRunningTime="2025-11-26 11:24:40.113726923 +0000 UTC m=+839.704938445" watchObservedRunningTime="2025-11-26 11:24:40.115520307 +0000 UTC m=+839.706731829" Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.654740 4622 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert\") pod \"infra-operator-controller-manager-57548d458d-gzvb9\" (UID: \"af470876-f83d-453f-bc58-96c91cabc509\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:24:40 crc kubenswrapper[4622]: E1126 11:24:40.654939 4622 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 26 11:24:40 crc kubenswrapper[4622]: E1126 11:24:40.655033 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert podName:af470876-f83d-453f-bc58-96c91cabc509 nodeName:}" failed. No retries permitted until 2025-11-26 11:24:56.655016694 +0000 UTC m=+856.246228216 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert") pod "infra-operator-controller-manager-57548d458d-gzvb9" (UID: "af470876-f83d-453f-bc58-96c91cabc509") : secret "infra-operator-webhook-server-cert" not found Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.962895 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert\") pod \"openstack-baremetal-operator-controller-manager-674cb676c8pzq88\" (UID: \"421c5332-a5be-406b-a039-e44918747042\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" Nov 26 11:24:40 crc kubenswrapper[4622]: I1126 11:24:40.983245 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421c5332-a5be-406b-a039-e44918747042-cert\") pod \"openstack-baremetal-operator-controller-manager-674cb676c8pzq88\" (UID: \"421c5332-a5be-406b-a039-e44918747042\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.026113 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-jvn88" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.026290 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-mrcsw" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.026392 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-9slpq" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.026918 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-8mgd6" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.027044 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-8h8x8" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.027103 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-zc4pp" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.027127 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-k2ggr" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.027147 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-4244k" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.244272 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-dt5t8" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.253421 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.266662 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.266751 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.270408 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-webhook-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.270871 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/903a6422-4d68-4aab-96e8-25452ffab66d-metrics-certs\") pod \"openstack-operator-controller-manager-659d75f7c6-q9vn7\" (UID: \"903a6422-4d68-4aab-96e8-25452ffab66d\") " pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.564191 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-chjn4" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.572758 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:41 crc kubenswrapper[4622]: I1126 11:24:41.649320 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88"] Nov 26 11:24:42 crc kubenswrapper[4622]: W1126 11:24:42.151722 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod421c5332_a5be_406b_a039_e44918747042.slice/crio-2f06e21c51f9ec794ad376b156491c358105764326707f296e33e1cc026db9de WatchSource:0}: Error finding container 2f06e21c51f9ec794ad376b156491c358105764326707f296e33e1cc026db9de: Status 404 returned error can't find the container with id 2f06e21c51f9ec794ad376b156491c358105764326707f296e33e1cc026db9de Nov 26 11:24:42 crc kubenswrapper[4622]: I1126 11:24:42.353623 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7"] Nov 26 11:24:43 crc kubenswrapper[4622]: I1126 11:24:43.036340 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj" event={"ID":"d30b60e7-2165-4660-a037-5a3c47f807e5","Type":"ContainerStarted","Data":"873a04facac15364ad218d4448b7ec1b4d05cc31a6d54ddf793fbe67814288ba"} Nov 26 11:24:43 crc kubenswrapper[4622]: I1126 11:24:43.036680 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj" event={"ID":"d30b60e7-2165-4660-a037-5a3c47f807e5","Type":"ContainerStarted","Data":"a22f6e1696475ce7d102974ed01ff98a9fbdac7a4eb63b2189a58cd43369cbd5"} Nov 26 11:24:43 crc kubenswrapper[4622]: I1126 11:24:43.037666 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj" Nov 26 11:24:43 crc kubenswrapper[4622]: I1126 11:24:43.039391 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" event={"ID":"421c5332-a5be-406b-a039-e44918747042","Type":"ContainerStarted","Data":"2f06e21c51f9ec794ad376b156491c358105764326707f296e33e1cc026db9de"} Nov 26 11:24:43 crc kubenswrapper[4622]: I1126 11:24:43.043038 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk" event={"ID":"8ef536a2-d861-4672-94e6-5058fc76eb85","Type":"ContainerStarted","Data":"bf4e1d9840a17cdd2076dcbd59e1f20c52d0c16be07c3857824622ab2f0cbee2"} Nov 26 11:24:43 crc kubenswrapper[4622]: I1126 11:24:43.043085 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk" event={"ID":"8ef536a2-d861-4672-94e6-5058fc76eb85","Type":"ContainerStarted","Data":"16306cbfc032bfa213cb1332af32c786829bb0797a6dbffb3c52e816cbf0673e"} Nov 26 11:24:43 crc kubenswrapper[4622]: I1126 11:24:43.043249 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk" Nov 26 11:24:43 crc kubenswrapper[4622]: I1126 11:24:43.044099 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" 
event={"ID":"903a6422-4d68-4aab-96e8-25452ffab66d","Type":"ContainerStarted","Data":"1435451100be512f1a9777c6f3ab445e37d144e38cf8467fd53ebeb71d302850"} Nov 26 11:24:43 crc kubenswrapper[4622]: I1126 11:24:43.044131 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" event={"ID":"903a6422-4d68-4aab-96e8-25452ffab66d","Type":"ContainerStarted","Data":"1a8c060225da57dddcf32ef1c7b606e8e140efd91a31b6eea302c7e59f937ded"} Nov 26 11:24:43 crc kubenswrapper[4622]: I1126 11:24:43.044448 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:43 crc kubenswrapper[4622]: I1126 11:24:43.068348 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk" podStartSLOduration=2.084969027 podStartE2EDuration="18.068333652s" podCreationTimestamp="2025-11-26 11:24:25 +0000 UTC" firstStartedPulling="2025-11-26 11:24:26.193276488 +0000 UTC m=+825.784488000" lastFinishedPulling="2025-11-26 11:24:42.176641104 +0000 UTC m=+841.767852625" observedRunningTime="2025-11-26 11:24:43.061693865 +0000 UTC m=+842.652905387" watchObservedRunningTime="2025-11-26 11:24:43.068333652 +0000 UTC m=+842.659545175" Nov 26 11:24:43 crc kubenswrapper[4622]: I1126 11:24:43.073695 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj" podStartSLOduration=3.04601303 podStartE2EDuration="19.073684378s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:26.155864674 +0000 UTC m=+825.747076196" lastFinishedPulling="2025-11-26 11:24:42.183536022 +0000 UTC m=+841.774747544" observedRunningTime="2025-11-26 11:24:43.052540215 +0000 UTC m=+842.643751738" watchObservedRunningTime="2025-11-26 11:24:43.073684378 +0000 UTC m=+842.664895899" Nov 26 11:24:43 crc kubenswrapper[4622]: I1126 11:24:43.087576 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" podStartSLOduration=18.087554306 podStartE2EDuration="18.087554306s" podCreationTimestamp="2025-11-26 11:24:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:24:43.082336221 +0000 UTC m=+842.673547742" watchObservedRunningTime="2025-11-26 11:24:43.087554306 +0000 UTC m=+842.678765827" Nov 26 11:24:44 crc kubenswrapper[4622]: I1126 11:24:44.961642 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-955677c94-88ql9" Nov 26 11:24:45 crc kubenswrapper[4622]: I1126 11:24:45.174925 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55" Nov 26 11:24:45 crc kubenswrapper[4622]: I1126 11:24:45.265193 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz" Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.077752 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" 
event={"ID":"421c5332-a5be-406b-a039-e44918747042","Type":"ContainerStarted","Data":"c1057e9bbfb22363e7413ea82ee77bfe1edee2d93d5e6c13ded86e44d5ecd09f"} Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.078163 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" event={"ID":"421c5332-a5be-406b-a039-e44918747042","Type":"ContainerStarted","Data":"bf723fb7c0bae9e4cc35f330da5566bc772e664ae20acbdb225283291b3a7326"} Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.078582 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.081149 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz" event={"ID":"0694f996-33c7-4190-b883-0e404ce778f7","Type":"ContainerStarted","Data":"d26c182c04b6065df037ab3d12c6cd17410844581505ad454f8c5e7da07f5be3"} Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.081232 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz" event={"ID":"0694f996-33c7-4190-b883-0e404ce778f7","Type":"ContainerStarted","Data":"2cf46705bdab35a4e9af81b3f488116e1b866160086c4e238bde6a45f8ffcf52"} Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.081414 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz" Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.084104 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2" event={"ID":"c048fbdd-e084-4e64-9d3b-c9d8083b9019","Type":"ContainerStarted","Data":"8c218a96f2083dafd91654b689ee99081cb9ab8f657bc9bc9a0cbc9a61fc5f09"} Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.084148 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2" event={"ID":"c048fbdd-e084-4e64-9d3b-c9d8083b9019","Type":"ContainerStarted","Data":"73b94de2fa903478e9333e34dba28572c1250d2140d4f2cdef18ccd496c91115"} Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.084404 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2" Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.085994 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw" event={"ID":"671c4f3d-053f-4261-bac4-537e9f4c88fa","Type":"ContainerStarted","Data":"819a9f5fef4c85f87f87819fc0212620048a179b3ea3837f37a0f84b5b97ba1b"} Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.086056 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw" event={"ID":"671c4f3d-053f-4261-bac4-537e9f4c88fa","Type":"ContainerStarted","Data":"a68a00bb91c3b695fbea74e3031b7f5fb5b1c72f10f9572838eb1cb090de0763"} Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.086299 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw" Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.088153 4622 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz" event={"ID":"79a68295-8240-4081-8c98-f75e2a04160b","Type":"ContainerStarted","Data":"9647289513bb83dfacb1db24dfd309e93781f7f45881c0c43689d8fc974026b8"} Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.090685 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm" event={"ID":"3ccaeddc-377f-4950-b942-9420c4bbeaa6","Type":"ContainerStarted","Data":"da469cda3c34b7520d12fbb068b39edab5010eff6f3054d9aca9378024a7c53f"} Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.090725 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm" event={"ID":"3ccaeddc-377f-4950-b942-9420c4bbeaa6","Type":"ContainerStarted","Data":"c4fdcd55661a373a36a9537ea086b16c50d5ecac09568adf64017808a688b8e4"} Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.090996 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm" Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.093349 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55" event={"ID":"ff9243ae-4f66-4399-9ab0-67d195b2319f","Type":"ContainerStarted","Data":"c348f0112bc2ee117043db03b0102adb79da62415fdf12ec0411745ff7e52168"} Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.095736 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-955677c94-88ql9" event={"ID":"202d189d-cd09-4574-bea4-ad76a0b82cc4","Type":"ContainerStarted","Data":"025adf5474bd1b76abbcb0e5655aaba3006ed0abbcd3e2599a498fa8d56db5b6"} Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.108991 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" podStartSLOduration=19.587272935 podStartE2EDuration="23.10897913s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:42.167974682 +0000 UTC m=+841.759186204" lastFinishedPulling="2025-11-26 11:24:45.689680877 +0000 UTC m=+845.280892399" observedRunningTime="2025-11-26 11:24:47.102920889 +0000 UTC m=+846.694132411" watchObservedRunningTime="2025-11-26 11:24:47.10897913 +0000 UTC m=+846.700190652" Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.125585 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm" podStartSLOduration=3.654140442 podStartE2EDuration="23.125566475s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:26.193485533 +0000 UTC m=+825.784697055" lastFinishedPulling="2025-11-26 11:24:45.664911566 +0000 UTC m=+845.256123088" observedRunningTime="2025-11-26 11:24:47.120429462 +0000 UTC m=+846.711640985" watchObservedRunningTime="2025-11-26 11:24:47.125566475 +0000 UTC m=+846.716777997" Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.140743 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-nrn55" podStartSLOduration=13.834657887 podStartE2EDuration="23.140731196s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" 
firstStartedPulling="2025-11-26 11:24:25.851966736 +0000 UTC m=+825.443178257" lastFinishedPulling="2025-11-26 11:24:35.158040045 +0000 UTC m=+834.749251566" observedRunningTime="2025-11-26 11:24:47.13559727 +0000 UTC m=+846.726808811" watchObservedRunningTime="2025-11-26 11:24:47.140731196 +0000 UTC m=+846.731942718" Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.169025 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2" podStartSLOduration=3.646696118 podStartE2EDuration="23.168985503s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:26.199514418 +0000 UTC m=+825.790725940" lastFinishedPulling="2025-11-26 11:24:45.721803803 +0000 UTC m=+845.313015325" observedRunningTime="2025-11-26 11:24:47.151577279 +0000 UTC m=+846.742788811" watchObservedRunningTime="2025-11-26 11:24:47.168985503 +0000 UTC m=+846.760197025" Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.177568 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz" podStartSLOduration=3.65779675 podStartE2EDuration="23.177552535s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:26.169678016 +0000 UTC m=+825.760889538" lastFinishedPulling="2025-11-26 11:24:45.689433801 +0000 UTC m=+845.280645323" observedRunningTime="2025-11-26 11:24:47.176946823 +0000 UTC m=+846.768158345" watchObservedRunningTime="2025-11-26 11:24:47.177552535 +0000 UTC m=+846.768764058" Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.203078 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw" podStartSLOduration=2.725185441 podStartE2EDuration="22.20306506s" podCreationTimestamp="2025-11-26 11:24:25 +0000 UTC" firstStartedPulling="2025-11-26 11:24:26.198598289 +0000 UTC m=+825.789809811" lastFinishedPulling="2025-11-26 11:24:45.676477908 +0000 UTC m=+845.267689430" observedRunningTime="2025-11-26 11:24:47.197780871 +0000 UTC m=+846.788992392" watchObservedRunningTime="2025-11-26 11:24:47.20306506 +0000 UTC m=+846.794276582" Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.218701 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-wdjgz" podStartSLOduration=14.167720559 podStartE2EDuration="23.218690079s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:26.106632398 +0000 UTC m=+825.697843920" lastFinishedPulling="2025-11-26 11:24:35.157601918 +0000 UTC m=+834.748813440" observedRunningTime="2025-11-26 11:24:47.212676454 +0000 UTC m=+846.803887976" watchObservedRunningTime="2025-11-26 11:24:47.218690079 +0000 UTC m=+846.809901601" Nov 26 11:24:47 crc kubenswrapper[4622]: I1126 11:24:47.240328 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-955677c94-88ql9" podStartSLOduration=14.430290667 podStartE2EDuration="23.24030537s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:25.482655854 +0000 UTC m=+825.073867377" lastFinishedPulling="2025-11-26 11:24:34.292670558 +0000 UTC m=+833.883882080" observedRunningTime="2025-11-26 11:24:47.236894695 +0000 UTC m=+846.828106217" 
watchObservedRunningTime="2025-11-26 11:24:47.24030537 +0000 UTC m=+846.831516892" Nov 26 11:24:51 crc kubenswrapper[4622]: I1126 11:24:51.260393 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-674cb676c8pzq88" Nov 26 11:24:51 crc kubenswrapper[4622]: I1126 11:24:51.578920 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-659d75f7c6-q9vn7" Nov 26 11:24:55 crc kubenswrapper[4622]: I1126 11:24:55.360992 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-mn2wz" Nov 26 11:24:55 crc kubenswrapper[4622]: I1126 11:24:55.384830 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zhrsj" Nov 26 11:24:55 crc kubenswrapper[4622]: I1126 11:24:55.401930 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-8rlt2" Nov 26 11:24:55 crc kubenswrapper[4622]: I1126 11:24:55.417143 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-2xzvm" Nov 26 11:24:55 crc kubenswrapper[4622]: I1126 11:24:55.482377 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-9zzhk" Nov 26 11:24:55 crc kubenswrapper[4622]: I1126 11:24:55.534476 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-mrntw" Nov 26 11:24:56 crc kubenswrapper[4622]: I1126 11:24:56.705131 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert\") pod \"infra-operator-controller-manager-57548d458d-gzvb9\" (UID: \"af470876-f83d-453f-bc58-96c91cabc509\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:24:56 crc kubenswrapper[4622]: I1126 11:24:56.710474 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af470876-f83d-453f-bc58-96c91cabc509-cert\") pod \"infra-operator-controller-manager-57548d458d-gzvb9\" (UID: \"af470876-f83d-453f-bc58-96c91cabc509\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:24:56 crc kubenswrapper[4622]: I1126 11:24:56.845459 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-gst4t" Nov 26 11:24:56 crc kubenswrapper[4622]: I1126 11:24:56.854699 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:24:57 crc kubenswrapper[4622]: I1126 11:24:57.208609 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9"] Nov 26 11:24:57 crc kubenswrapper[4622]: W1126 11:24:57.212718 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf470876_f83d_453f_bc58_96c91cabc509.slice/crio-0f612e0a072b6753d0103bf408eb548ed44312fed6a66ec9b60eabd744650190 WatchSource:0}: Error finding container 0f612e0a072b6753d0103bf408eb548ed44312fed6a66ec9b60eabd744650190: Status 404 returned error can't find the container with id 0f612e0a072b6753d0103bf408eb548ed44312fed6a66ec9b60eabd744650190 Nov 26 11:24:58 crc kubenswrapper[4622]: I1126 11:24:58.162466 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" event={"ID":"af470876-f83d-453f-bc58-96c91cabc509","Type":"ContainerStarted","Data":"0f612e0a072b6753d0103bf408eb548ed44312fed6a66ec9b60eabd744650190"} Nov 26 11:25:00 crc kubenswrapper[4622]: I1126 11:25:00.182233 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" event={"ID":"af470876-f83d-453f-bc58-96c91cabc509","Type":"ContainerStarted","Data":"5409b0275a2dc127b9ff4bb7fb59e9eedfcacdced38a9ce2a22c3e244327a69e"} Nov 26 11:25:00 crc kubenswrapper[4622]: I1126 11:25:00.182853 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:25:00 crc kubenswrapper[4622]: I1126 11:25:00.182866 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" event={"ID":"af470876-f83d-453f-bc58-96c91cabc509","Type":"ContainerStarted","Data":"fcbf48508983355b7a068d08d122e4f2e9ae48617df069a852f5b115059d2dac"} Nov 26 11:25:00 crc kubenswrapper[4622]: I1126 11:25:00.197259 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" podStartSLOduration=34.24646543 podStartE2EDuration="36.197234738s" podCreationTimestamp="2025-11-26 11:24:24 +0000 UTC" firstStartedPulling="2025-11-26 11:24:57.21498781 +0000 UTC m=+856.806199333" lastFinishedPulling="2025-11-26 11:24:59.165757119 +0000 UTC m=+858.756968641" observedRunningTime="2025-11-26 11:25:00.19538085 +0000 UTC m=+859.786592372" watchObservedRunningTime="2025-11-26 11:25:00.197234738 +0000 UTC m=+859.788446261" Nov 26 11:25:06 crc kubenswrapper[4622]: I1126 11:25:06.861389 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gzvb9" Nov 26 11:25:15 crc kubenswrapper[4622]: I1126 11:25:15.198916 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:25:15 crc kubenswrapper[4622]: I1126 11:25:15.199413 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.640866 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7bdd77c89-6b6bm"] Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.642322 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bdd77c89-6b6bm" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.644388 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.644867 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.645163 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.646469 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-ss76h" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.653540 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bdd77c89-6b6bm"] Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.698590 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6584b49599-svnvg"] Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.700024 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6584b49599-svnvg" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.703463 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.704577 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6584b49599-svnvg"] Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.777053 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jn64\" (UniqueName: \"kubernetes.io/projected/d5de5130-5076-495f-bc4a-4ea1e657df5e-kube-api-access-4jn64\") pod \"dnsmasq-dns-6584b49599-svnvg\" (UID: \"d5de5130-5076-495f-bc4a-4ea1e657df5e\") " pod="openstack/dnsmasq-dns-6584b49599-svnvg" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.777151 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5de5130-5076-495f-bc4a-4ea1e657df5e-config\") pod \"dnsmasq-dns-6584b49599-svnvg\" (UID: \"d5de5130-5076-495f-bc4a-4ea1e657df5e\") " pod="openstack/dnsmasq-dns-6584b49599-svnvg" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.777318 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5de5130-5076-495f-bc4a-4ea1e657df5e-dns-svc\") pod \"dnsmasq-dns-6584b49599-svnvg\" (UID: \"d5de5130-5076-495f-bc4a-4ea1e657df5e\") " pod="openstack/dnsmasq-dns-6584b49599-svnvg" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.777365 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5qc4\" (UniqueName: \"kubernetes.io/projected/33d73b3a-6a70-461f-8dba-d76ebcb8f315-kube-api-access-m5qc4\") pod \"dnsmasq-dns-7bdd77c89-6b6bm\" (UID: 
\"33d73b3a-6a70-461f-8dba-d76ebcb8f315\") " pod="openstack/dnsmasq-dns-7bdd77c89-6b6bm" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.777941 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33d73b3a-6a70-461f-8dba-d76ebcb8f315-config\") pod \"dnsmasq-dns-7bdd77c89-6b6bm\" (UID: \"33d73b3a-6a70-461f-8dba-d76ebcb8f315\") " pod="openstack/dnsmasq-dns-7bdd77c89-6b6bm" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.879264 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jn64\" (UniqueName: \"kubernetes.io/projected/d5de5130-5076-495f-bc4a-4ea1e657df5e-kube-api-access-4jn64\") pod \"dnsmasq-dns-6584b49599-svnvg\" (UID: \"d5de5130-5076-495f-bc4a-4ea1e657df5e\") " pod="openstack/dnsmasq-dns-6584b49599-svnvg" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.879339 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5de5130-5076-495f-bc4a-4ea1e657df5e-config\") pod \"dnsmasq-dns-6584b49599-svnvg\" (UID: \"d5de5130-5076-495f-bc4a-4ea1e657df5e\") " pod="openstack/dnsmasq-dns-6584b49599-svnvg" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.879365 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5de5130-5076-495f-bc4a-4ea1e657df5e-dns-svc\") pod \"dnsmasq-dns-6584b49599-svnvg\" (UID: \"d5de5130-5076-495f-bc4a-4ea1e657df5e\") " pod="openstack/dnsmasq-dns-6584b49599-svnvg" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.879394 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5qc4\" (UniqueName: \"kubernetes.io/projected/33d73b3a-6a70-461f-8dba-d76ebcb8f315-kube-api-access-m5qc4\") pod \"dnsmasq-dns-7bdd77c89-6b6bm\" (UID: \"33d73b3a-6a70-461f-8dba-d76ebcb8f315\") " pod="openstack/dnsmasq-dns-7bdd77c89-6b6bm" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.879435 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33d73b3a-6a70-461f-8dba-d76ebcb8f315-config\") pod \"dnsmasq-dns-7bdd77c89-6b6bm\" (UID: \"33d73b3a-6a70-461f-8dba-d76ebcb8f315\") " pod="openstack/dnsmasq-dns-7bdd77c89-6b6bm" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.880263 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5de5130-5076-495f-bc4a-4ea1e657df5e-config\") pod \"dnsmasq-dns-6584b49599-svnvg\" (UID: \"d5de5130-5076-495f-bc4a-4ea1e657df5e\") " pod="openstack/dnsmasq-dns-6584b49599-svnvg" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.880306 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5de5130-5076-495f-bc4a-4ea1e657df5e-dns-svc\") pod \"dnsmasq-dns-6584b49599-svnvg\" (UID: \"d5de5130-5076-495f-bc4a-4ea1e657df5e\") " pod="openstack/dnsmasq-dns-6584b49599-svnvg" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.880321 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33d73b3a-6a70-461f-8dba-d76ebcb8f315-config\") pod \"dnsmasq-dns-7bdd77c89-6b6bm\" (UID: \"33d73b3a-6a70-461f-8dba-d76ebcb8f315\") " pod="openstack/dnsmasq-dns-7bdd77c89-6b6bm" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 
11:25:22.895643 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5qc4\" (UniqueName: \"kubernetes.io/projected/33d73b3a-6a70-461f-8dba-d76ebcb8f315-kube-api-access-m5qc4\") pod \"dnsmasq-dns-7bdd77c89-6b6bm\" (UID: \"33d73b3a-6a70-461f-8dba-d76ebcb8f315\") " pod="openstack/dnsmasq-dns-7bdd77c89-6b6bm" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.895759 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jn64\" (UniqueName: \"kubernetes.io/projected/d5de5130-5076-495f-bc4a-4ea1e657df5e-kube-api-access-4jn64\") pod \"dnsmasq-dns-6584b49599-svnvg\" (UID: \"d5de5130-5076-495f-bc4a-4ea1e657df5e\") " pod="openstack/dnsmasq-dns-6584b49599-svnvg" Nov 26 11:25:22 crc kubenswrapper[4622]: I1126 11:25:22.961417 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bdd77c89-6b6bm" Nov 26 11:25:23 crc kubenswrapper[4622]: I1126 11:25:23.025053 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6584b49599-svnvg" Nov 26 11:25:23 crc kubenswrapper[4622]: I1126 11:25:23.327734 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bdd77c89-6b6bm"] Nov 26 11:25:23 crc kubenswrapper[4622]: I1126 11:25:23.390847 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6584b49599-svnvg"] Nov 26 11:25:23 crc kubenswrapper[4622]: W1126 11:25:23.392911 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5de5130_5076_495f_bc4a_4ea1e657df5e.slice/crio-16a1b8c6b410b973625e6e7eb1e4e928e88e407b9d1d110e7addd47310e70083 WatchSource:0}: Error finding container 16a1b8c6b410b973625e6e7eb1e4e928e88e407b9d1d110e7addd47310e70083: Status 404 returned error can't find the container with id 16a1b8c6b410b973625e6e7eb1e4e928e88e407b9d1d110e7addd47310e70083 Nov 26 11:25:24 crc kubenswrapper[4622]: I1126 11:25:24.338658 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6584b49599-svnvg" event={"ID":"d5de5130-5076-495f-bc4a-4ea1e657df5e","Type":"ContainerStarted","Data":"16a1b8c6b410b973625e6e7eb1e4e928e88e407b9d1d110e7addd47310e70083"} Nov 26 11:25:24 crc kubenswrapper[4622]: I1126 11:25:24.341533 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdd77c89-6b6bm" event={"ID":"33d73b3a-6a70-461f-8dba-d76ebcb8f315","Type":"ContainerStarted","Data":"b2eca531bbe2c44930809046e3bc6ef2fc9bcbe59067be32c06cea8bc4b9cd9e"} Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.675473 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6584b49599-svnvg"] Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.694905 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c6d9948dc-qbgfc"] Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.696482 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.707767 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c6d9948dc-qbgfc"] Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.827805 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-dns-svc\") pod \"dnsmasq-dns-7c6d9948dc-qbgfc\" (UID: \"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f\") " pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.827859 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-config\") pod \"dnsmasq-dns-7c6d9948dc-qbgfc\" (UID: \"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f\") " pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.827945 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvpnx\" (UniqueName: \"kubernetes.io/projected/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-kube-api-access-bvpnx\") pod \"dnsmasq-dns-7c6d9948dc-qbgfc\" (UID: \"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f\") " pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.923698 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bdd77c89-6b6bm"] Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.929603 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-dns-svc\") pod \"dnsmasq-dns-7c6d9948dc-qbgfc\" (UID: \"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f\") " pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.929652 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-config\") pod \"dnsmasq-dns-7c6d9948dc-qbgfc\" (UID: \"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f\") " pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.929728 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvpnx\" (UniqueName: \"kubernetes.io/projected/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-kube-api-access-bvpnx\") pod \"dnsmasq-dns-7c6d9948dc-qbgfc\" (UID: \"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f\") " pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.931317 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-dns-svc\") pod \"dnsmasq-dns-7c6d9948dc-qbgfc\" (UID: \"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f\") " pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.938103 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-config\") pod \"dnsmasq-dns-7c6d9948dc-qbgfc\" (UID: \"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f\") " pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.946940 
4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6486446b9f-qm4ht"] Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.948278 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.951274 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvpnx\" (UniqueName: \"kubernetes.io/projected/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-kube-api-access-bvpnx\") pod \"dnsmasq-dns-7c6d9948dc-qbgfc\" (UID: \"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f\") " pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" Nov 26 11:25:25 crc kubenswrapper[4622]: I1126 11:25:25.957496 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6486446b9f-qm4ht"] Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.017144 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.030595 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/968b0117-b325-47ff-aba1-87eaf3d326ad-config\") pod \"dnsmasq-dns-6486446b9f-qm4ht\" (UID: \"968b0117-b325-47ff-aba1-87eaf3d326ad\") " pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.030650 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/968b0117-b325-47ff-aba1-87eaf3d326ad-dns-svc\") pod \"dnsmasq-dns-6486446b9f-qm4ht\" (UID: \"968b0117-b325-47ff-aba1-87eaf3d326ad\") " pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.030715 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8h5dh\" (UniqueName: \"kubernetes.io/projected/968b0117-b325-47ff-aba1-87eaf3d326ad-kube-api-access-8h5dh\") pod \"dnsmasq-dns-6486446b9f-qm4ht\" (UID: \"968b0117-b325-47ff-aba1-87eaf3d326ad\") " pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.132471 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/968b0117-b325-47ff-aba1-87eaf3d326ad-config\") pod \"dnsmasq-dns-6486446b9f-qm4ht\" (UID: \"968b0117-b325-47ff-aba1-87eaf3d326ad\") " pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.132808 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/968b0117-b325-47ff-aba1-87eaf3d326ad-dns-svc\") pod \"dnsmasq-dns-6486446b9f-qm4ht\" (UID: \"968b0117-b325-47ff-aba1-87eaf3d326ad\") " pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.132958 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8h5dh\" (UniqueName: \"kubernetes.io/projected/968b0117-b325-47ff-aba1-87eaf3d326ad-kube-api-access-8h5dh\") pod \"dnsmasq-dns-6486446b9f-qm4ht\" (UID: \"968b0117-b325-47ff-aba1-87eaf3d326ad\") " pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.133371 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/968b0117-b325-47ff-aba1-87eaf3d326ad-config\") pod \"dnsmasq-dns-6486446b9f-qm4ht\" (UID: \"968b0117-b325-47ff-aba1-87eaf3d326ad\") " pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.133985 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/968b0117-b325-47ff-aba1-87eaf3d326ad-dns-svc\") pod \"dnsmasq-dns-6486446b9f-qm4ht\" (UID: \"968b0117-b325-47ff-aba1-87eaf3d326ad\") " pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.149923 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8h5dh\" (UniqueName: \"kubernetes.io/projected/968b0117-b325-47ff-aba1-87eaf3d326ad-kube-api-access-8h5dh\") pod \"dnsmasq-dns-6486446b9f-qm4ht\" (UID: \"968b0117-b325-47ff-aba1-87eaf3d326ad\") " pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.281697 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.486436 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c6d9948dc-qbgfc"] Nov 26 11:25:26 crc kubenswrapper[4622]: W1126 11:25:26.533366 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3dc7c4a_baf9_4368_9c24_b9c4b82a102f.slice/crio-347bf97572d017f2c86079e489e8991499b0b286e63577769507b4229563fd86 WatchSource:0}: Error finding container 347bf97572d017f2c86079e489e8991499b0b286e63577769507b4229563fd86: Status 404 returned error can't find the container with id 347bf97572d017f2c86079e489e8991499b0b286e63577769507b4229563fd86 Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.564671 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6486446b9f-qm4ht"] Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.807519 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.808695 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.811452 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.816452 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.816771 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.816816 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.817551 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-z25df" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.818702 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.818999 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.822633 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.840432 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/38fbda9e-5203-4941-829d-1309dcf835e9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.840465 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/38fbda9e-5203-4941-829d-1309dcf835e9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.840496 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.840740 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-config-data\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.840807 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.840941 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.841026 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h65sw\" (UniqueName: \"kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-kube-api-access-h65sw\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.841101 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.841146 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.841198 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.841239 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.942662 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.942707 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-config-data\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.942737 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.942771 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " 
pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.942808 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h65sw\" (UniqueName: \"kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-kube-api-access-h65sw\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.942847 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.942871 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.942901 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.942931 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.942980 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/38fbda9e-5203-4941-829d-1309dcf835e9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.943003 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/38fbda9e-5203-4941-829d-1309dcf835e9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.943998 4622 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.944119 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.944898 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-config-data\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.944918 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.945290 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.948086 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.948895 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.956224 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/38fbda9e-5203-4941-829d-1309dcf835e9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.956252 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.956888 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/38fbda9e-5203-4941-829d-1309dcf835e9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.958399 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h65sw\" (UniqueName: \"kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-kube-api-access-h65sw\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:26 crc kubenswrapper[4622]: I1126 11:25:26.969027 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") " pod="openstack/rabbitmq-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.088263 4622 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/rabbitmq-cell1-server-0"] Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.089929 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.091277 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.093587 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.093639 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.094239 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.094275 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.094588 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-zf6th" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.095312 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.103414 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.128804 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.145954 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.145991 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6jzh\" (UniqueName: \"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-kube-api-access-z6jzh\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.146108 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.146137 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.146208 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.146237 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.146269 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.146289 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/35d9b078-6e67-49d3-a82d-c0b0bc289904-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.146319 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.146341 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.146370 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/35d9b078-6e67-49d3-a82d-c0b0bc289904-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.248994 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.249060 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.249121 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.249161 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.249195 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.249227 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/35d9b078-6e67-49d3-a82d-c0b0bc289904-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.249261 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.249304 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.249339 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/35d9b078-6e67-49d3-a82d-c0b0bc289904-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.249366 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.249390 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6jzh\" (UniqueName: \"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-kube-api-access-z6jzh\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.253217 4622 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") device mount path 
\"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.253600 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.254653 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.255004 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.255683 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.256287 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.262741 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.264991 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/35d9b078-6e67-49d3-a82d-c0b0bc289904-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.268345 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.268622 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6jzh\" (UniqueName: \"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-kube-api-access-z6jzh\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.268697 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/35d9b078-6e67-49d3-a82d-c0b0bc289904-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.289761 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.385381 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" event={"ID":"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f","Type":"ContainerStarted","Data":"347bf97572d017f2c86079e489e8991499b0b286e63577769507b4229563fd86"} Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.388164 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" event={"ID":"968b0117-b325-47ff-aba1-87eaf3d326ad","Type":"ContainerStarted","Data":"03e4ccfa2106342c55431a376e13d20f003d6cfd9b2f81264d5fea31e9d893ad"} Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.409470 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:25:27 crc kubenswrapper[4622]: I1126 11:25:27.559469 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.608744 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.610443 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.612195 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.615555 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.615786 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-z777b" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.615806 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.620645 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.621848 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.693162 4622 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.775881 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/acbc91d6-2b39-4663-9501-ee36fda433ed-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.775938 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.775964 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/acbc91d6-2b39-4663-9501-ee36fda433ed-kolla-config\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.776007 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/acbc91d6-2b39-4663-9501-ee36fda433ed-config-data-generated\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.776041 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acbc91d6-2b39-4663-9501-ee36fda433ed-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.776083 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/acbc91d6-2b39-4663-9501-ee36fda433ed-config-data-default\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" 
Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.776125 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwlrz\" (UniqueName: \"kubernetes.io/projected/acbc91d6-2b39-4663-9501-ee36fda433ed-kube-api-access-mwlrz\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.776160 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/acbc91d6-2b39-4663-9501-ee36fda433ed-operator-scripts\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.878344 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.878385 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/acbc91d6-2b39-4663-9501-ee36fda433ed-kolla-config\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.878680 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/acbc91d6-2b39-4663-9501-ee36fda433ed-config-data-generated\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.878728 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acbc91d6-2b39-4663-9501-ee36fda433ed-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.878778 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/acbc91d6-2b39-4663-9501-ee36fda433ed-config-data-default\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.878861 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwlrz\" (UniqueName: \"kubernetes.io/projected/acbc91d6-2b39-4663-9501-ee36fda433ed-kube-api-access-mwlrz\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.878934 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/acbc91d6-2b39-4663-9501-ee36fda433ed-operator-scripts\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.878983 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/acbc91d6-2b39-4663-9501-ee36fda433ed-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.879727 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/acbc91d6-2b39-4663-9501-ee36fda433ed-config-data-generated\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.880360 4622 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.880857 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/acbc91d6-2b39-4663-9501-ee36fda433ed-config-data-default\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.880420 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/acbc91d6-2b39-4663-9501-ee36fda433ed-kolla-config\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.882792 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/acbc91d6-2b39-4663-9501-ee36fda433ed-operator-scripts\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.887816 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/acbc91d6-2b39-4663-9501-ee36fda433ed-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.891673 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acbc91d6-2b39-4663-9501-ee36fda433ed-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.897512 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwlrz\" (UniqueName: \"kubernetes.io/projected/acbc91d6-2b39-4663-9501-ee36fda433ed-kube-api-access-mwlrz\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc kubenswrapper[4622]: I1126 11:25:28.907539 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"acbc91d6-2b39-4663-9501-ee36fda433ed\") " pod="openstack/openstack-galera-0" Nov 26 11:25:28 crc 
kubenswrapper[4622]: I1126 11:25:28.928835 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Nov 26 11:25:29 crc kubenswrapper[4622]: I1126 11:25:29.090161 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 26 11:25:29 crc kubenswrapper[4622]: I1126 11:25:29.404215 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"38fbda9e-5203-4941-829d-1309dcf835e9","Type":"ContainerStarted","Data":"258458f19cadb76c3baa8e12bf24bb570c4feb6801ef0f531fee545774a2c45c"} Nov 26 11:25:29 crc kubenswrapper[4622]: I1126 11:25:29.993868 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 26 11:25:29 crc kubenswrapper[4622]: I1126 11:25:29.995712 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.000955 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.001610 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-c4jxt" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.002308 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.004445 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.007280 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.012258 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/395125e4-6b50-4032-811d-b474b647ed88-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.012341 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/395125e4-6b50-4032-811d-b474b647ed88-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.012370 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.012416 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/395125e4-6b50-4032-811d-b474b647ed88-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.012434 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/395125e4-6b50-4032-811d-b474b647ed88-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.012484 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/395125e4-6b50-4032-811d-b474b647ed88-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.012541 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/395125e4-6b50-4032-811d-b474b647ed88-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.012927 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq5zk\" (UniqueName: \"kubernetes.io/projected/395125e4-6b50-4032-811d-b474b647ed88-kube-api-access-mq5zk\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.114206 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq5zk\" (UniqueName: \"kubernetes.io/projected/395125e4-6b50-4032-811d-b474b647ed88-kube-api-access-mq5zk\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.114300 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/395125e4-6b50-4032-811d-b474b647ed88-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.114329 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/395125e4-6b50-4032-811d-b474b647ed88-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.114350 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.114367 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/395125e4-6b50-4032-811d-b474b647ed88-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.114383 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/395125e4-6b50-4032-811d-b474b647ed88-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.114400 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/395125e4-6b50-4032-811d-b474b647ed88-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.114421 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/395125e4-6b50-4032-811d-b474b647ed88-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.118910 4622 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.119525 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/395125e4-6b50-4032-811d-b474b647ed88-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.120072 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/395125e4-6b50-4032-811d-b474b647ed88-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.120749 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/395125e4-6b50-4032-811d-b474b647ed88-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.121138 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/395125e4-6b50-4032-811d-b474b647ed88-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.122654 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/395125e4-6b50-4032-811d-b474b647ed88-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.123176 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/395125e4-6b50-4032-811d-b474b647ed88-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.147298 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq5zk\" (UniqueName: \"kubernetes.io/projected/395125e4-6b50-4032-811d-b474b647ed88-kube-api-access-mq5zk\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.149370 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"395125e4-6b50-4032-811d-b474b647ed88\") " pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.324009 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.383234 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.384285 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.392608 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-zhsvq" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.392809 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.392951 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.397802 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.419524 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/30b6a0c4-d329-474a-a14e-8068b059d893-kolla-config\") pod \"memcached-0\" (UID: \"30b6a0c4-d329-474a-a14e-8068b059d893\") " pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.419596 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/30b6a0c4-d329-474a-a14e-8068b059d893-memcached-tls-certs\") pod \"memcached-0\" (UID: \"30b6a0c4-d329-474a-a14e-8068b059d893\") " pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.419625 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30b6a0c4-d329-474a-a14e-8068b059d893-combined-ca-bundle\") pod \"memcached-0\" (UID: \"30b6a0c4-d329-474a-a14e-8068b059d893\") " pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.419650 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/30b6a0c4-d329-474a-a14e-8068b059d893-config-data\") pod \"memcached-0\" (UID: 
\"30b6a0c4-d329-474a-a14e-8068b059d893\") " pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.419754 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xbpx\" (UniqueName: \"kubernetes.io/projected/30b6a0c4-d329-474a-a14e-8068b059d893-kube-api-access-6xbpx\") pod \"memcached-0\" (UID: \"30b6a0c4-d329-474a-a14e-8068b059d893\") " pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.521793 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xbpx\" (UniqueName: \"kubernetes.io/projected/30b6a0c4-d329-474a-a14e-8068b059d893-kube-api-access-6xbpx\") pod \"memcached-0\" (UID: \"30b6a0c4-d329-474a-a14e-8068b059d893\") " pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.521953 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/30b6a0c4-d329-474a-a14e-8068b059d893-kolla-config\") pod \"memcached-0\" (UID: \"30b6a0c4-d329-474a-a14e-8068b059d893\") " pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.522022 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/30b6a0c4-d329-474a-a14e-8068b059d893-memcached-tls-certs\") pod \"memcached-0\" (UID: \"30b6a0c4-d329-474a-a14e-8068b059d893\") " pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.522058 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30b6a0c4-d329-474a-a14e-8068b059d893-combined-ca-bundle\") pod \"memcached-0\" (UID: \"30b6a0c4-d329-474a-a14e-8068b059d893\") " pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.522100 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/30b6a0c4-d329-474a-a14e-8068b059d893-config-data\") pod \"memcached-0\" (UID: \"30b6a0c4-d329-474a-a14e-8068b059d893\") " pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.522953 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/30b6a0c4-d329-474a-a14e-8068b059d893-kolla-config\") pod \"memcached-0\" (UID: \"30b6a0c4-d329-474a-a14e-8068b059d893\") " pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.523150 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/30b6a0c4-d329-474a-a14e-8068b059d893-config-data\") pod \"memcached-0\" (UID: \"30b6a0c4-d329-474a-a14e-8068b059d893\") " pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.527364 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/30b6a0c4-d329-474a-a14e-8068b059d893-memcached-tls-certs\") pod \"memcached-0\" (UID: \"30b6a0c4-d329-474a-a14e-8068b059d893\") " pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.531164 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30b6a0c4-d329-474a-a14e-8068b059d893-combined-ca-bundle\") pod 
\"memcached-0\" (UID: \"30b6a0c4-d329-474a-a14e-8068b059d893\") " pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.537513 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xbpx\" (UniqueName: \"kubernetes.io/projected/30b6a0c4-d329-474a-a14e-8068b059d893-kube-api-access-6xbpx\") pod \"memcached-0\" (UID: \"30b6a0c4-d329-474a-a14e-8068b059d893\") " pod="openstack/memcached-0" Nov 26 11:25:30 crc kubenswrapper[4622]: I1126 11:25:30.711171 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 26 11:25:32 crc kubenswrapper[4622]: I1126 11:25:32.426549 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Nov 26 11:25:32 crc kubenswrapper[4622]: I1126 11:25:32.429038 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 26 11:25:32 crc kubenswrapper[4622]: I1126 11:25:32.432205 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-wqnwv" Nov 26 11:25:32 crc kubenswrapper[4622]: I1126 11:25:32.451094 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"35d9b078-6e67-49d3-a82d-c0b0bc289904","Type":"ContainerStarted","Data":"537302411caf0095aac666ffd35217cbc164e6a161750f2fe19319ad0662982e"} Nov 26 11:25:32 crc kubenswrapper[4622]: I1126 11:25:32.455814 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 26 11:25:32 crc kubenswrapper[4622]: I1126 11:25:32.560259 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs47f\" (UniqueName: \"kubernetes.io/projected/20655504-f2d9-4a76-b534-b479e7660957-kube-api-access-gs47f\") pod \"kube-state-metrics-0\" (UID: \"20655504-f2d9-4a76-b534-b479e7660957\") " pod="openstack/kube-state-metrics-0" Nov 26 11:25:32 crc kubenswrapper[4622]: I1126 11:25:32.664030 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs47f\" (UniqueName: \"kubernetes.io/projected/20655504-f2d9-4a76-b534-b479e7660957-kube-api-access-gs47f\") pod \"kube-state-metrics-0\" (UID: \"20655504-f2d9-4a76-b534-b479e7660957\") " pod="openstack/kube-state-metrics-0" Nov 26 11:25:32 crc kubenswrapper[4622]: I1126 11:25:32.681069 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs47f\" (UniqueName: \"kubernetes.io/projected/20655504-f2d9-4a76-b534-b479e7660957-kube-api-access-gs47f\") pod \"kube-state-metrics-0\" (UID: \"20655504-f2d9-4a76-b534-b479e7660957\") " pod="openstack/kube-state-metrics-0" Nov 26 11:25:32 crc kubenswrapper[4622]: I1126 11:25:32.757271 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 26 11:25:35 crc kubenswrapper[4622]: I1126 11:25:35.874526 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sfxnc"] Nov 26 11:25:35 crc kubenswrapper[4622]: I1126 11:25:35.876243 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:35 crc kubenswrapper[4622]: I1126 11:25:35.886762 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sfxnc"] Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.021634 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9t8bq\" (UniqueName: \"kubernetes.io/projected/63704728-b90b-436c-9d93-41bc13f47765-kube-api-access-9t8bq\") pod \"certified-operators-sfxnc\" (UID: \"63704728-b90b-436c-9d93-41bc13f47765\") " pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.021706 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63704728-b90b-436c-9d93-41bc13f47765-catalog-content\") pod \"certified-operators-sfxnc\" (UID: \"63704728-b90b-436c-9d93-41bc13f47765\") " pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.021872 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63704728-b90b-436c-9d93-41bc13f47765-utilities\") pod \"certified-operators-sfxnc\" (UID: \"63704728-b90b-436c-9d93-41bc13f47765\") " pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.123493 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63704728-b90b-436c-9d93-41bc13f47765-utilities\") pod \"certified-operators-sfxnc\" (UID: \"63704728-b90b-436c-9d93-41bc13f47765\") " pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.123616 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9t8bq\" (UniqueName: \"kubernetes.io/projected/63704728-b90b-436c-9d93-41bc13f47765-kube-api-access-9t8bq\") pod \"certified-operators-sfxnc\" (UID: \"63704728-b90b-436c-9d93-41bc13f47765\") " pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.123680 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63704728-b90b-436c-9d93-41bc13f47765-catalog-content\") pod \"certified-operators-sfxnc\" (UID: \"63704728-b90b-436c-9d93-41bc13f47765\") " pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.123998 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63704728-b90b-436c-9d93-41bc13f47765-utilities\") pod \"certified-operators-sfxnc\" (UID: \"63704728-b90b-436c-9d93-41bc13f47765\") " pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.124083 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63704728-b90b-436c-9d93-41bc13f47765-catalog-content\") pod \"certified-operators-sfxnc\" (UID: \"63704728-b90b-436c-9d93-41bc13f47765\") " pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.146881 4622 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9t8bq\" (UniqueName: \"kubernetes.io/projected/63704728-b90b-436c-9d93-41bc13f47765-kube-api-access-9t8bq\") pod \"certified-operators-sfxnc\" (UID: \"63704728-b90b-436c-9d93-41bc13f47765\") " pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.188626 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-8n2wh"] Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.189595 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.192049 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-xfzh4" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.192232 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.192308 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.194411 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.202477 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-mj2jt"] Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.204744 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.215280 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8n2wh"] Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.227792 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/90daf55b-343c-4b1f-990f-1ad602050cf9-var-run\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.227834 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb5cj\" (UniqueName: \"kubernetes.io/projected/90daf55b-343c-4b1f-990f-1ad602050cf9-kube-api-access-rb5cj\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.227859 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/90daf55b-343c-4b1f-990f-1ad602050cf9-etc-ovs\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.228435 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/90daf55b-343c-4b1f-990f-1ad602050cf9-var-log\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.228610 4622 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/90daf55b-343c-4b1f-990f-1ad602050cf9-var-lib\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.228650 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/90daf55b-343c-4b1f-990f-1ad602050cf9-scripts\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.233557 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-mj2jt"] Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.329771 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1fba939c-4058-428f-a359-ea4e031e9fb3-var-run-ovn\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.329818 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fba939c-4058-428f-a359-ea4e031e9fb3-ovn-controller-tls-certs\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.329849 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/90daf55b-343c-4b1f-990f-1ad602050cf9-var-log\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.329867 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1fba939c-4058-428f-a359-ea4e031e9fb3-var-run\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.329886 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7p9j\" (UniqueName: \"kubernetes.io/projected/1fba939c-4058-428f-a359-ea4e031e9fb3-kube-api-access-r7p9j\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.329930 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/90daf55b-343c-4b1f-990f-1ad602050cf9-var-lib\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.329952 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/90daf55b-343c-4b1f-990f-1ad602050cf9-scripts\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc 
kubenswrapper[4622]: I1126 11:25:36.329969 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1fba939c-4058-428f-a359-ea4e031e9fb3-var-log-ovn\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.329992 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1fba939c-4058-428f-a359-ea4e031e9fb3-scripts\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.330015 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/90daf55b-343c-4b1f-990f-1ad602050cf9-var-run\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.330032 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb5cj\" (UniqueName: \"kubernetes.io/projected/90daf55b-343c-4b1f-990f-1ad602050cf9-kube-api-access-rb5cj\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.330048 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/90daf55b-343c-4b1f-990f-1ad602050cf9-etc-ovs\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.330096 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fba939c-4058-428f-a359-ea4e031e9fb3-combined-ca-bundle\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.330689 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/90daf55b-343c-4b1f-990f-1ad602050cf9-var-log\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.330771 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/90daf55b-343c-4b1f-990f-1ad602050cf9-var-lib\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.330941 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/90daf55b-343c-4b1f-990f-1ad602050cf9-var-run\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.331405 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: 
\"kubernetes.io/host-path/90daf55b-343c-4b1f-990f-1ad602050cf9-etc-ovs\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.332463 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/90daf55b-343c-4b1f-990f-1ad602050cf9-scripts\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.347582 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb5cj\" (UniqueName: \"kubernetes.io/projected/90daf55b-343c-4b1f-990f-1ad602050cf9-kube-api-access-rb5cj\") pod \"ovn-controller-ovs-mj2jt\" (UID: \"90daf55b-343c-4b1f-990f-1ad602050cf9\") " pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.430836 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fba939c-4058-428f-a359-ea4e031e9fb3-combined-ca-bundle\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.430889 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1fba939c-4058-428f-a359-ea4e031e9fb3-var-run-ovn\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.430918 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fba939c-4058-428f-a359-ea4e031e9fb3-ovn-controller-tls-certs\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.430942 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1fba939c-4058-428f-a359-ea4e031e9fb3-var-run\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.430964 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7p9j\" (UniqueName: \"kubernetes.io/projected/1fba939c-4058-428f-a359-ea4e031e9fb3-kube-api-access-r7p9j\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.431023 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1fba939c-4058-428f-a359-ea4e031e9fb3-var-log-ovn\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.431063 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1fba939c-4058-428f-a359-ea4e031e9fb3-scripts\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 
26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.431481 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1fba939c-4058-428f-a359-ea4e031e9fb3-var-log-ovn\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.431534 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1fba939c-4058-428f-a359-ea4e031e9fb3-var-run-ovn\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.431924 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1fba939c-4058-428f-a359-ea4e031e9fb3-var-run\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.432954 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1fba939c-4058-428f-a359-ea4e031e9fb3-scripts\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.435022 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fba939c-4058-428f-a359-ea4e031e9fb3-combined-ca-bundle\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.442083 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fba939c-4058-428f-a359-ea4e031e9fb3-ovn-controller-tls-certs\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.448109 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7p9j\" (UniqueName: \"kubernetes.io/projected/1fba939c-4058-428f-a359-ea4e031e9fb3-kube-api-access-r7p9j\") pod \"ovn-controller-8n2wh\" (UID: \"1fba939c-4058-428f-a359-ea4e031e9fb3\") " pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.511875 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:36 crc kubenswrapper[4622]: I1126 11:25:36.521818 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.087836 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.090422 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.096882 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.097175 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.097318 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-w46dj" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.097469 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.097704 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.103229 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.250297 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/677a4ff3-7482-47f3-8f6d-6d96617fc000-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.250442 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/677a4ff3-7482-47f3-8f6d-6d96617fc000-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.250464 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4x7xw\" (UniqueName: \"kubernetes.io/projected/677a4ff3-7482-47f3-8f6d-6d96617fc000-kube-api-access-4x7xw\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.250557 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/677a4ff3-7482-47f3-8f6d-6d96617fc000-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.250631 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/677a4ff3-7482-47f3-8f6d-6d96617fc000-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.250714 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677a4ff3-7482-47f3-8f6d-6d96617fc000-config\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.250751 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.250925 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/677a4ff3-7482-47f3-8f6d-6d96617fc000-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.354929 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/677a4ff3-7482-47f3-8f6d-6d96617fc000-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.354985 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4x7xw\" (UniqueName: \"kubernetes.io/projected/677a4ff3-7482-47f3-8f6d-6d96617fc000-kube-api-access-4x7xw\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.355073 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/677a4ff3-7482-47f3-8f6d-6d96617fc000-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.355167 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/677a4ff3-7482-47f3-8f6d-6d96617fc000-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.355212 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677a4ff3-7482-47f3-8f6d-6d96617fc000-config\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.355257 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.355285 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/677a4ff3-7482-47f3-8f6d-6d96617fc000-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.355308 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/677a4ff3-7482-47f3-8f6d-6d96617fc000-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 
11:25:37.355707 4622 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.356234 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/677a4ff3-7482-47f3-8f6d-6d96617fc000-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.356368 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677a4ff3-7482-47f3-8f6d-6d96617fc000-config\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.356706 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/677a4ff3-7482-47f3-8f6d-6d96617fc000-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.365447 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/677a4ff3-7482-47f3-8f6d-6d96617fc000-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.365733 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/677a4ff3-7482-47f3-8f6d-6d96617fc000-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.366197 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/677a4ff3-7482-47f3-8f6d-6d96617fc000-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.373028 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4x7xw\" (UniqueName: \"kubernetes.io/projected/677a4ff3-7482-47f3-8f6d-6d96617fc000-kube-api-access-4x7xw\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.374355 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"677a4ff3-7482-47f3-8f6d-6d96617fc000\") " pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:37 crc kubenswrapper[4622]: I1126 11:25:37.417094 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:38 crc kubenswrapper[4622]: I1126 11:25:38.583221 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 26 11:25:38 crc kubenswrapper[4622]: E1126 11:25:38.877399 4622 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:18f8463fe46fe6081d5682009e92bbcb3df33282b83b0a2857abaece795cf1ba" Nov 26 11:25:38 crc kubenswrapper[4622]: E1126 11:25:38.877879 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:18f8463fe46fe6081d5682009e92bbcb3df33282b83b0a2857abaece795cf1ba,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4jn64,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-6584b49599-svnvg_openstack(d5de5130-5076-495f-bc4a-4ea1e657df5e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 26 11:25:38 crc kubenswrapper[4622]: E1126 11:25:38.879982 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-6584b49599-svnvg" podUID="d5de5130-5076-495f-bc4a-4ea1e657df5e" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.476765 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 26 11:25:39 crc 
kubenswrapper[4622]: I1126 11:25:39.478682 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.481080 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.481107 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-ldsbm" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.481433 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.481849 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.483628 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.510256 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.510300 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-config\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.510323 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.510350 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.514775 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gq5r\" (UniqueName: \"kubernetes.io/projected/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-kube-api-access-7gq5r\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.514878 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.514983 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.515188 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.617216 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.617270 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-config\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.617311 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.617346 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.617393 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gq5r\" (UniqueName: \"kubernetes.io/projected/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-kube-api-access-7gq5r\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.617428 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.617478 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.617514 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 
11:25:39.617953 4622 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.618455 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.619019 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.619435 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-config\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.623194 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.623933 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.628334 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.633964 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gq5r\" (UniqueName: \"kubernetes.io/projected/781b9fbd-6f1d-48f9-be0a-8c15276f21a8-kube-api-access-7gq5r\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.636385 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"781b9fbd-6f1d-48f9-be0a-8c15276f21a8\") " pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.801716 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.901413 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6584b49599-svnvg" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.924922 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5de5130-5076-495f-bc4a-4ea1e657df5e-config\") pod \"d5de5130-5076-495f-bc4a-4ea1e657df5e\" (UID: \"d5de5130-5076-495f-bc4a-4ea1e657df5e\") " Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.925019 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5de5130-5076-495f-bc4a-4ea1e657df5e-dns-svc\") pod \"d5de5130-5076-495f-bc4a-4ea1e657df5e\" (UID: \"d5de5130-5076-495f-bc4a-4ea1e657df5e\") " Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.925099 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jn64\" (UniqueName: \"kubernetes.io/projected/d5de5130-5076-495f-bc4a-4ea1e657df5e-kube-api-access-4jn64\") pod \"d5de5130-5076-495f-bc4a-4ea1e657df5e\" (UID: \"d5de5130-5076-495f-bc4a-4ea1e657df5e\") " Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.925542 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5de5130-5076-495f-bc4a-4ea1e657df5e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d5de5130-5076-495f-bc4a-4ea1e657df5e" (UID: "d5de5130-5076-495f-bc4a-4ea1e657df5e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.925592 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5de5130-5076-495f-bc4a-4ea1e657df5e-config" (OuterVolumeSpecName: "config") pod "d5de5130-5076-495f-bc4a-4ea1e657df5e" (UID: "d5de5130-5076-495f-bc4a-4ea1e657df5e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:25:39 crc kubenswrapper[4622]: I1126 11:25:39.929025 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5de5130-5076-495f-bc4a-4ea1e657df5e-kube-api-access-4jn64" (OuterVolumeSpecName: "kube-api-access-4jn64") pod "d5de5130-5076-495f-bc4a-4ea1e657df5e" (UID: "d5de5130-5076-495f-bc4a-4ea1e657df5e"). InnerVolumeSpecName "kube-api-access-4jn64". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.029583 4622 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5de5130-5076-495f-bc4a-4ea1e657df5e-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.029638 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jn64\" (UniqueName: \"kubernetes.io/projected/d5de5130-5076-495f-bc4a-4ea1e657df5e-kube-api-access-4jn64\") on node \"crc\" DevicePath \"\"" Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.029651 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5de5130-5076-495f-bc4a-4ea1e657df5e-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.227123 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Nov 26 11:25:40 crc kubenswrapper[4622]: E1126 11:25:40.334173 4622 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3dc7c4a_baf9_4368_9c24_b9c4b82a102f.slice/crio-conmon-37b6906980ff25ad6f0ee350da1b021be6b322ab854c7a9a61ba8037aa02f205.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod968b0117_b325_47ff_aba1_87eaf3d326ad.slice/crio-conmon-ae65043521d34f6c8365bd992bd303ea0347019cd842dbbb35649250a7b14519.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3dc7c4a_baf9_4368_9c24_b9c4b82a102f.slice/crio-37b6906980ff25ad6f0ee350da1b021be6b322ab854c7a9a61ba8037aa02f205.scope\": RecentStats: unable to find data in memory cache]" Nov 26 11:25:40 crc kubenswrapper[4622]: W1126 11:25:40.384203 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacbc91d6_2b39_4663_9501_ee36fda433ed.slice/crio-118e2212fcdeea69b2364b6ea71fb41c240d875846a8f2563d4d84c3e190bdc9 WatchSource:0}: Error finding container 118e2212fcdeea69b2364b6ea71fb41c240d875846a8f2563d4d84c3e190bdc9: Status 404 returned error can't find the container with id 118e2212fcdeea69b2364b6ea71fb41c240d875846a8f2563d4d84c3e190bdc9 Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.386295 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.458287 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8n2wh"] Nov 26 11:25:40 crc kubenswrapper[4622]: W1126 11:25:40.462695 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod20655504_f2d9_4a76_b534_b479e7660957.slice/crio-debfb6a58a932f5bc26787f924989c5177276e087c187122e3a6396cf8824efd WatchSource:0}: Error finding container debfb6a58a932f5bc26787f924989c5177276e087c187122e3a6396cf8824efd: Status 404 returned error can't find the container with id debfb6a58a932f5bc26787f924989c5177276e087c187122e3a6396cf8824efd Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.463329 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 26 11:25:40 crc kubenswrapper[4622]: W1126 11:25:40.464335 4622 manager.go:1169] Failed to process 
watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fba939c_4058_428f_a359_ea4e031e9fb3.slice/crio-744e2c8012c839584c3434f3270b8fe3ea574f42ca976a6a495bdd9035f59421 WatchSource:0}: Error finding container 744e2c8012c839584c3434f3270b8fe3ea574f42ca976a6a495bdd9035f59421: Status 404 returned error can't find the container with id 744e2c8012c839584c3434f3270b8fe3ea574f42ca976a6a495bdd9035f59421 Nov 26 11:25:40 crc kubenswrapper[4622]: W1126 11:25:40.465850 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod63704728_b90b_436c_9d93_41bc13f47765.slice/crio-560c78cfe007e8e0c2fc818a3cb913560cc7b04be4434575649e9bea02c5febd WatchSource:0}: Error finding container 560c78cfe007e8e0c2fc818a3cb913560cc7b04be4434575649e9bea02c5febd: Status 404 returned error can't find the container with id 560c78cfe007e8e0c2fc818a3cb913560cc7b04be4434575649e9bea02c5febd Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.468630 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sfxnc"] Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.517041 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sfxnc" event={"ID":"63704728-b90b-436c-9d93-41bc13f47765","Type":"ContainerStarted","Data":"560c78cfe007e8e0c2fc818a3cb913560cc7b04be4434575649e9bea02c5febd"} Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.521025 4622 generic.go:334] "Generic (PLEG): container finished" podID="33d73b3a-6a70-461f-8dba-d76ebcb8f315" containerID="5684a1e43dc3628ca0f159fabc9a682fdf863eedd5f843fb93ba870ce225f908" exitCode=0 Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.521124 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdd77c89-6b6bm" event={"ID":"33d73b3a-6a70-461f-8dba-d76ebcb8f315","Type":"ContainerDied","Data":"5684a1e43dc3628ca0f159fabc9a682fdf863eedd5f843fb93ba870ce225f908"} Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.522998 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"acbc91d6-2b39-4663-9501-ee36fda433ed","Type":"ContainerStarted","Data":"118e2212fcdeea69b2364b6ea71fb41c240d875846a8f2563d4d84c3e190bdc9"} Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.524953 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6584b49599-svnvg" event={"ID":"d5de5130-5076-495f-bc4a-4ea1e657df5e","Type":"ContainerDied","Data":"16a1b8c6b410b973625e6e7eb1e4e928e88e407b9d1d110e7addd47310e70083"} Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.525002 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6584b49599-svnvg" Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.526794 4622 generic.go:334] "Generic (PLEG): container finished" podID="968b0117-b325-47ff-aba1-87eaf3d326ad" containerID="ae65043521d34f6c8365bd992bd303ea0347019cd842dbbb35649250a7b14519" exitCode=0 Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.526835 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" event={"ID":"968b0117-b325-47ff-aba1-87eaf3d326ad","Type":"ContainerDied","Data":"ae65043521d34f6c8365bd992bd303ea0347019cd842dbbb35649250a7b14519"} Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.530128 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8n2wh" event={"ID":"1fba939c-4058-428f-a359-ea4e031e9fb3","Type":"ContainerStarted","Data":"744e2c8012c839584c3434f3270b8fe3ea574f42ca976a6a495bdd9035f59421"} Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.537282 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"20655504-f2d9-4a76-b534-b479e7660957","Type":"ContainerStarted","Data":"debfb6a58a932f5bc26787f924989c5177276e087c187122e3a6396cf8824efd"} Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.545634 4622 generic.go:334] "Generic (PLEG): container finished" podID="d3dc7c4a-baf9-4368-9c24-b9c4b82a102f" containerID="37b6906980ff25ad6f0ee350da1b021be6b322ab854c7a9a61ba8037aa02f205" exitCode=0 Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.545709 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" event={"ID":"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f","Type":"ContainerDied","Data":"37b6906980ff25ad6f0ee350da1b021be6b322ab854c7a9a61ba8037aa02f205"} Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.547261 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"30b6a0c4-d329-474a-a14e-8068b059d893","Type":"ContainerStarted","Data":"a137ad11b5523fb0320e5cbdce7c4e84186013e6974a9436df9426bc9d09b164"} Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.548372 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"395125e4-6b50-4032-811d-b474b647ed88","Type":"ContainerStarted","Data":"f5b0c049afbd39d1ce1e7a631ee6f06024f3aa1c6cba3d0b52b7fe75245689d8"} Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.586046 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-mj2jt"] Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.611784 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6584b49599-svnvg"] Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.615923 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6584b49599-svnvg"] Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.699079 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.724390 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5de5130-5076-495f-bc4a-4ea1e657df5e" path="/var/lib/kubelet/pods/d5de5130-5076-495f-bc4a-4ea1e657df5e/volumes" Nov 26 11:25:40 crc kubenswrapper[4622]: E1126 11:25:40.737992 4622 info.go:109] Failed to get network devices: open /sys/class/net/b2eca531bbe2c44/address: no such file or directory Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 
11:25:40.924590 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bdd77c89-6b6bm" Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.947710 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5qc4\" (UniqueName: \"kubernetes.io/projected/33d73b3a-6a70-461f-8dba-d76ebcb8f315-kube-api-access-m5qc4\") pod \"33d73b3a-6a70-461f-8dba-d76ebcb8f315\" (UID: \"33d73b3a-6a70-461f-8dba-d76ebcb8f315\") " Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.947879 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33d73b3a-6a70-461f-8dba-d76ebcb8f315-config\") pod \"33d73b3a-6a70-461f-8dba-d76ebcb8f315\" (UID: \"33d73b3a-6a70-461f-8dba-d76ebcb8f315\") " Nov 26 11:25:40 crc kubenswrapper[4622]: I1126 11:25:40.954409 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33d73b3a-6a70-461f-8dba-d76ebcb8f315-kube-api-access-m5qc4" (OuterVolumeSpecName: "kube-api-access-m5qc4") pod "33d73b3a-6a70-461f-8dba-d76ebcb8f315" (UID: "33d73b3a-6a70-461f-8dba-d76ebcb8f315"). InnerVolumeSpecName "kube-api-access-m5qc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.045198 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33d73b3a-6a70-461f-8dba-d76ebcb8f315-config" (OuterVolumeSpecName: "config") pod "33d73b3a-6a70-461f-8dba-d76ebcb8f315" (UID: "33d73b3a-6a70-461f-8dba-d76ebcb8f315"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.050379 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5qc4\" (UniqueName: \"kubernetes.io/projected/33d73b3a-6a70-461f-8dba-d76ebcb8f315-kube-api-access-m5qc4\") on node \"crc\" DevicePath \"\"" Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.050403 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33d73b3a-6a70-461f-8dba-d76ebcb8f315-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.566008 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"35d9b078-6e67-49d3-a82d-c0b0bc289904","Type":"ContainerStarted","Data":"041b7b43d0fdde5d09001c58a1e433023ac172b898a5c6b1424d9cc25714de94"} Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.573956 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" event={"ID":"968b0117-b325-47ff-aba1-87eaf3d326ad","Type":"ContainerStarted","Data":"4d08402a434335bd9df5b51afa91e3b69cda08af1dd435705fa3294401429bd9"} Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.575769 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.577948 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-mj2jt" event={"ID":"90daf55b-343c-4b1f-990f-1ad602050cf9","Type":"ContainerStarted","Data":"88aa62dc8134a7b8090564c838d0e0ada37138f23e34d21e9bf2e14163fa6af3"} Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.583155 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdd77c89-6b6bm" 
event={"ID":"33d73b3a-6a70-461f-8dba-d76ebcb8f315","Type":"ContainerDied","Data":"b2eca531bbe2c44930809046e3bc6ef2fc9bcbe59067be32c06cea8bc4b9cd9e"} Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.583198 4622 scope.go:117] "RemoveContainer" containerID="5684a1e43dc3628ca0f159fabc9a682fdf863eedd5f843fb93ba870ce225f908" Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.583307 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bdd77c89-6b6bm" Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.588702 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"38fbda9e-5203-4941-829d-1309dcf835e9","Type":"ContainerStarted","Data":"5deadae99260bef731001a5f824300cc52b829eee6bb202d2644be4131ea1279"} Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.591891 4622 generic.go:334] "Generic (PLEG): container finished" podID="63704728-b90b-436c-9d93-41bc13f47765" containerID="d1d3cdd015d55def7f60721da6e0a63f21c114987ff672d025d2306703e6400d" exitCode=0 Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.591947 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sfxnc" event={"ID":"63704728-b90b-436c-9d93-41bc13f47765","Type":"ContainerDied","Data":"d1d3cdd015d55def7f60721da6e0a63f21c114987ff672d025d2306703e6400d"} Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.604821 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"677a4ff3-7482-47f3-8f6d-6d96617fc000","Type":"ContainerStarted","Data":"d19c27b12ec9e80ba2a9c902db16cdb3db3e189e664b4f70d9db0210149a3e53"} Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.617867 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.623472 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" event={"ID":"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f","Type":"ContainerStarted","Data":"06d473fb5f8fb935ebfc34cbec0a38a050a3d3c84ddec0cdfdf9fa460c0d5709"} Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.625264 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.667156 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" podStartSLOduration=3.251042278 podStartE2EDuration="16.667131593s" podCreationTimestamp="2025-11-26 11:25:25 +0000 UTC" firstStartedPulling="2025-11-26 11:25:26.569246635 +0000 UTC m=+886.160458158" lastFinishedPulling="2025-11-26 11:25:39.985335951 +0000 UTC m=+899.576547473" observedRunningTime="2025-11-26 11:25:41.649755731 +0000 UTC m=+901.240967253" watchObservedRunningTime="2025-11-26 11:25:41.667131593 +0000 UTC m=+901.258343104" Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.695361 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bdd77c89-6b6bm"] Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.707740 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7bdd77c89-6b6bm"] Nov 26 11:25:41 crc kubenswrapper[4622]: I1126 11:25:41.717323 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" podStartSLOduration=3.310132731 podStartE2EDuration="16.717301988s" 
podCreationTimestamp="2025-11-26 11:25:25 +0000 UTC" firstStartedPulling="2025-11-26 11:25:26.538281324 +0000 UTC m=+886.129492836" lastFinishedPulling="2025-11-26 11:25:39.945450571 +0000 UTC m=+899.536662093" observedRunningTime="2025-11-26 11:25:41.690019576 +0000 UTC m=+901.281231098" watchObservedRunningTime="2025-11-26 11:25:41.717301988 +0000 UTC m=+901.308513510" Nov 26 11:25:41 crc kubenswrapper[4622]: W1126 11:25:41.737392 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod781b9fbd_6f1d_48f9_be0a_8c15276f21a8.slice/crio-464ad3119b6ec783b7406c7a0c19ff8a9c5d09edfb00ba5afe8b54b4433265eb WatchSource:0}: Error finding container 464ad3119b6ec783b7406c7a0c19ff8a9c5d09edfb00ba5afe8b54b4433265eb: Status 404 returned error can't find the container with id 464ad3119b6ec783b7406c7a0c19ff8a9c5d09edfb00ba5afe8b54b4433265eb Nov 26 11:25:42 crc kubenswrapper[4622]: I1126 11:25:42.630611 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"781b9fbd-6f1d-48f9-be0a-8c15276f21a8","Type":"ContainerStarted","Data":"464ad3119b6ec783b7406c7a0c19ff8a9c5d09edfb00ba5afe8b54b4433265eb"} Nov 26 11:25:42 crc kubenswrapper[4622]: I1126 11:25:42.714612 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33d73b3a-6a70-461f-8dba-d76ebcb8f315" path="/var/lib/kubelet/pods/33d73b3a-6a70-461f-8dba-d76ebcb8f315/volumes" Nov 26 11:25:45 crc kubenswrapper[4622]: I1126 11:25:45.200619 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:25:45 crc kubenswrapper[4622]: I1126 11:25:45.201746 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.018695 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.283691 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.331185 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c6d9948dc-qbgfc"] Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.659407 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-mj2jt" event={"ID":"90daf55b-343c-4b1f-990f-1ad602050cf9","Type":"ContainerStarted","Data":"393d9486b432ef1c27edaac893230ec8c4b97e2542298dd96fcb7eb387d0746f"} Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.661053 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8n2wh" event={"ID":"1fba939c-4058-428f-a359-ea4e031e9fb3","Type":"ContainerStarted","Data":"5f9a045b83a7adaadb4e571f85014fa194c34968ff866de65364282cfd46f04f"} Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.661139 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-8n2wh" Nov 26 11:25:46 crc 
Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.662431 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"20655504-f2d9-4a76-b534-b479e7660957","Type":"ContainerStarted","Data":"290e6f0fac579d0d4c90db27fdfeeb7e2402fd0f1f26528808605d51f299160c"}
Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.662538 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.663584 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"30b6a0c4-d329-474a-a14e-8068b059d893","Type":"ContainerStarted","Data":"7691736276e4711f14dc0e50fee89c60b5de46c46f00215b1eb39ce306c3764b"}
Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.663629 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0"
Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.664605 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"395125e4-6b50-4032-811d-b474b647ed88","Type":"ContainerStarted","Data":"2b72e4cf191f6001e12a76c9a06e07dcb0fe57f639e85d94574618c524fcac85"}
Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.665725 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"acbc91d6-2b39-4663-9501-ee36fda433ed","Type":"ContainerStarted","Data":"285bed048cbeef733114f06ae3541fccc30d98745778cca3761d34f57f29f3ed"}
Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.666704 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"677a4ff3-7482-47f3-8f6d-6d96617fc000","Type":"ContainerStarted","Data":"dc07afb7bc873d44277b928195c5d043133dd5ab3f8915d554dca8619d555349"}
Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.668133 4622 generic.go:334] "Generic (PLEG): container finished" podID="63704728-b90b-436c-9d93-41bc13f47765" containerID="7ab4dae7cbc30856f178f131fee80ab8de8530484382299fe0c9be13a2f0a5e3" exitCode=0
Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.668177 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sfxnc" event={"ID":"63704728-b90b-436c-9d93-41bc13f47765","Type":"ContainerDied","Data":"7ab4dae7cbc30856f178f131fee80ab8de8530484382299fe0c9be13a2f0a5e3"}
Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.669625 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"781b9fbd-6f1d-48f9-be0a-8c15276f21a8","Type":"ContainerStarted","Data":"2d9d0276094dea73e6001f5eb77025fb7074acddfbbf385972a2fbd0e574e183"}
Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.669756 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" podUID="d3dc7c4a-baf9-4368-9c24-b9c4b82a102f" containerName="dnsmasq-dns" containerID="cri-o://06d473fb5f8fb935ebfc34cbec0a38a050a3d3c84ddec0cdfdf9fa460c0d5709" gracePeriod=10
Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.716417 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=11.250169709 podStartE2EDuration="16.716399599s" podCreationTimestamp="2025-11-26 11:25:30 +0000 UTC" firstStartedPulling="2025-11-26 11:25:40.439635841 +0000 UTC m=+900.030847363" lastFinishedPulling="2025-11-26 11:25:45.905865731 +0000 UTC m=+905.497077253" observedRunningTime="2025-11-26 11:25:46.709868567 +0000 UTC m=+906.301080089" watchObservedRunningTime="2025-11-26 11:25:46.716399599 +0000 UTC m=+906.307611122"
Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.742104 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-8n2wh" podStartSLOduration=5.023206942 podStartE2EDuration="10.742080992s" podCreationTimestamp="2025-11-26 11:25:36 +0000 UTC" firstStartedPulling="2025-11-26 11:25:40.468573296 +0000 UTC m=+900.059784818" lastFinishedPulling="2025-11-26 11:25:46.187447346 +0000 UTC m=+905.778658868" observedRunningTime="2025-11-26 11:25:46.739115777 +0000 UTC m=+906.330327299" watchObservedRunningTime="2025-11-26 11:25:46.742080992 +0000 UTC m=+906.333292514"
Nov 26 11:25:46 crc kubenswrapper[4622]: I1126 11:25:46.753994 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=9.063943271 podStartE2EDuration="14.753977337s" podCreationTimestamp="2025-11-26 11:25:32 +0000 UTC" firstStartedPulling="2025-11-26 11:25:40.464828851 +0000 UTC m=+900.056040374" lastFinishedPulling="2025-11-26 11:25:46.154862918 +0000 UTC m=+905.746074440" observedRunningTime="2025-11-26 11:25:46.749205443 +0000 UTC m=+906.340416965" watchObservedRunningTime="2025-11-26 11:25:46.753977337 +0000 UTC m=+906.345188859"
Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.045972 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc"
Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.165280 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-config\") pod \"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f\" (UID: \"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f\") "
Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.165331 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-dns-svc\") pod \"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f\" (UID: \"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f\") "
Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.165373 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvpnx\" (UniqueName: \"kubernetes.io/projected/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-kube-api-access-bvpnx\") pod \"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f\" (UID: \"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f\") "
Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.171404 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-kube-api-access-bvpnx" (OuterVolumeSpecName: "kube-api-access-bvpnx") pod "d3dc7c4a-baf9-4368-9c24-b9c4b82a102f" (UID: "d3dc7c4a-baf9-4368-9c24-b9c4b82a102f"). InnerVolumeSpecName "kube-api-access-bvpnx". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.196945 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-config" (OuterVolumeSpecName: "config") pod "d3dc7c4a-baf9-4368-9c24-b9c4b82a102f" (UID: "d3dc7c4a-baf9-4368-9c24-b9c4b82a102f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.267711 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.267744 4622 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.267755 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvpnx\" (UniqueName: \"kubernetes.io/projected/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f-kube-api-access-bvpnx\") on node \"crc\" DevicePath \"\"" Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.680375 4622 generic.go:334] "Generic (PLEG): container finished" podID="d3dc7c4a-baf9-4368-9c24-b9c4b82a102f" containerID="06d473fb5f8fb935ebfc34cbec0a38a050a3d3c84ddec0cdfdf9fa460c0d5709" exitCode=0 Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.680461 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.680494 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" event={"ID":"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f","Type":"ContainerDied","Data":"06d473fb5f8fb935ebfc34cbec0a38a050a3d3c84ddec0cdfdf9fa460c0d5709"} Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.682437 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c6d9948dc-qbgfc" event={"ID":"d3dc7c4a-baf9-4368-9c24-b9c4b82a102f","Type":"ContainerDied","Data":"347bf97572d017f2c86079e489e8991499b0b286e63577769507b4229563fd86"} Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.682465 4622 scope.go:117] "RemoveContainer" containerID="06d473fb5f8fb935ebfc34cbec0a38a050a3d3c84ddec0cdfdf9fa460c0d5709" Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.689256 4622 generic.go:334] "Generic (PLEG): container finished" podID="90daf55b-343c-4b1f-990f-1ad602050cf9" containerID="393d9486b432ef1c27edaac893230ec8c4b97e2542298dd96fcb7eb387d0746f" exitCode=0 Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.689387 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-mj2jt" event={"ID":"90daf55b-343c-4b1f-990f-1ad602050cf9","Type":"ContainerDied","Data":"393d9486b432ef1c27edaac893230ec8c4b97e2542298dd96fcb7eb387d0746f"} Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.697012 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sfxnc" event={"ID":"63704728-b90b-436c-9d93-41bc13f47765","Type":"ContainerStarted","Data":"79faf6d251a551a0d0430b946e8a2c283620728b684a0d518543c06211fdc921"} Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.709197 4622 scope.go:117] "RemoveContainer" 
containerID="37b6906980ff25ad6f0ee350da1b021be6b322ab854c7a9a61ba8037aa02f205" Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.733449 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-sfxnc" podStartSLOduration=7.138689081 podStartE2EDuration="12.733415343s" podCreationTimestamp="2025-11-26 11:25:35 +0000 UTC" firstStartedPulling="2025-11-26 11:25:41.726384535 +0000 UTC m=+901.317596056" lastFinishedPulling="2025-11-26 11:25:47.321110797 +0000 UTC m=+906.912322318" observedRunningTime="2025-11-26 11:25:47.718542162 +0000 UTC m=+907.309753704" watchObservedRunningTime="2025-11-26 11:25:47.733415343 +0000 UTC m=+907.324626885" Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.738628 4622 scope.go:117] "RemoveContainer" containerID="06d473fb5f8fb935ebfc34cbec0a38a050a3d3c84ddec0cdfdf9fa460c0d5709" Nov 26 11:25:47 crc kubenswrapper[4622]: E1126 11:25:47.739123 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06d473fb5f8fb935ebfc34cbec0a38a050a3d3c84ddec0cdfdf9fa460c0d5709\": container with ID starting with 06d473fb5f8fb935ebfc34cbec0a38a050a3d3c84ddec0cdfdf9fa460c0d5709 not found: ID does not exist" containerID="06d473fb5f8fb935ebfc34cbec0a38a050a3d3c84ddec0cdfdf9fa460c0d5709" Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.739191 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06d473fb5f8fb935ebfc34cbec0a38a050a3d3c84ddec0cdfdf9fa460c0d5709"} err="failed to get container status \"06d473fb5f8fb935ebfc34cbec0a38a050a3d3c84ddec0cdfdf9fa460c0d5709\": rpc error: code = NotFound desc = could not find container \"06d473fb5f8fb935ebfc34cbec0a38a050a3d3c84ddec0cdfdf9fa460c0d5709\": container with ID starting with 06d473fb5f8fb935ebfc34cbec0a38a050a3d3c84ddec0cdfdf9fa460c0d5709 not found: ID does not exist" Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.739217 4622 scope.go:117] "RemoveContainer" containerID="37b6906980ff25ad6f0ee350da1b021be6b322ab854c7a9a61ba8037aa02f205" Nov 26 11:25:47 crc kubenswrapper[4622]: E1126 11:25:47.739542 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37b6906980ff25ad6f0ee350da1b021be6b322ab854c7a9a61ba8037aa02f205\": container with ID starting with 37b6906980ff25ad6f0ee350da1b021be6b322ab854c7a9a61ba8037aa02f205 not found: ID does not exist" containerID="37b6906980ff25ad6f0ee350da1b021be6b322ab854c7a9a61ba8037aa02f205" Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.739576 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37b6906980ff25ad6f0ee350da1b021be6b322ab854c7a9a61ba8037aa02f205"} err="failed to get container status \"37b6906980ff25ad6f0ee350da1b021be6b322ab854c7a9a61ba8037aa02f205\": rpc error: code = NotFound desc = could not find container \"37b6906980ff25ad6f0ee350da1b021be6b322ab854c7a9a61ba8037aa02f205\": container with ID starting with 37b6906980ff25ad6f0ee350da1b021be6b322ab854c7a9a61ba8037aa02f205 not found: ID does not exist" Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.740549 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c6d9948dc-qbgfc"] Nov 26 11:25:47 crc kubenswrapper[4622]: I1126 11:25:47.745710 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c6d9948dc-qbgfc"] Nov 26 11:25:48 crc kubenswrapper[4622]: 
Nov 26 11:25:48 crc kubenswrapper[4622]: I1126 11:25:48.715308 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3dc7c4a-baf9-4368-9c24-b9c4b82a102f" path="/var/lib/kubelet/pods/d3dc7c4a-baf9-4368-9c24-b9c4b82a102f/volumes"
Nov 26 11:25:48 crc kubenswrapper[4622]: I1126 11:25:48.716552 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-mj2jt" event={"ID":"90daf55b-343c-4b1f-990f-1ad602050cf9","Type":"ContainerStarted","Data":"9c41e9ecd52df3a068d86d136a176e1ee3fd8c76c0e7dc09bb3dd7ada0563aae"}
Nov 26 11:25:49 crc kubenswrapper[4622]: I1126 11:25:49.718716 4622 generic.go:334] "Generic (PLEG): container finished" podID="acbc91d6-2b39-4663-9501-ee36fda433ed" containerID="285bed048cbeef733114f06ae3541fccc30d98745778cca3761d34f57f29f3ed" exitCode=0
Nov 26 11:25:49 crc kubenswrapper[4622]: I1126 11:25:49.718786 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"acbc91d6-2b39-4663-9501-ee36fda433ed","Type":"ContainerDied","Data":"285bed048cbeef733114f06ae3541fccc30d98745778cca3761d34f57f29f3ed"}
Nov 26 11:25:49 crc kubenswrapper[4622]: I1126 11:25:49.725062 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"677a4ff3-7482-47f3-8f6d-6d96617fc000","Type":"ContainerStarted","Data":"8eab95386f411cbef7c543d830dea6a1317db570cb9c95b8708414a121dfa518"}
Nov 26 11:25:49 crc kubenswrapper[4622]: I1126 11:25:49.727492 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"781b9fbd-6f1d-48f9-be0a-8c15276f21a8","Type":"ContainerStarted","Data":"e32a83af61aa7513a97bad38a4e29cc20a526b58698541aeeefee7f27875931c"}
Nov 26 11:25:49 crc kubenswrapper[4622]: I1126 11:25:49.733827 4622 generic.go:334] "Generic (PLEG): container finished" podID="395125e4-6b50-4032-811d-b474b647ed88" containerID="2b72e4cf191f6001e12a76c9a06e07dcb0fe57f639e85d94574618c524fcac85" exitCode=0
Nov 26 11:25:49 crc kubenswrapper[4622]: I1126 11:25:49.733978 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"395125e4-6b50-4032-811d-b474b647ed88","Type":"ContainerDied","Data":"2b72e4cf191f6001e12a76c9a06e07dcb0fe57f639e85d94574618c524fcac85"}
Nov 26 11:25:49 crc kubenswrapper[4622]: I1126 11:25:49.743453 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-mj2jt" event={"ID":"90daf55b-343c-4b1f-990f-1ad602050cf9","Type":"ContainerStarted","Data":"5f76db6f634ed3401e09afbaa42f2b74bf7ff59b30c0b61bbd6d62405a1b0c0f"}
Nov 26 11:25:49 crc kubenswrapper[4622]: I1126 11:25:49.744011 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-mj2jt"
Nov 26 11:25:49 crc kubenswrapper[4622]: I1126 11:25:49.744054 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-mj2jt"
Nov 26 11:25:49 crc kubenswrapper[4622]: I1126 11:25:49.779237 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=4.257190545 podStartE2EDuration="11.779216356s" podCreationTimestamp="2025-11-26 11:25:38 +0000 UTC" firstStartedPulling="2025-11-26 11:25:41.752202875 +0000 UTC m=+901.343414397" lastFinishedPulling="2025-11-26 11:25:49.274228686 +0000 UTC m=+908.865440208" observedRunningTime="2025-11-26 11:25:49.756427702 +0000 UTC m=+909.347639223" watchObservedRunningTime="2025-11-26 11:25:49.779216356 +0000 UTC m=+909.370427878"
Nov 26 11:25:49 crc kubenswrapper[4622]: I1126 11:25:49.789389 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=5.228195992 podStartE2EDuration="13.789364382s" podCreationTimestamp="2025-11-26 11:25:36 +0000 UTC" firstStartedPulling="2025-11-26 11:25:40.714308921 +0000 UTC m=+900.305520443" lastFinishedPulling="2025-11-26 11:25:49.275477311 +0000 UTC m=+908.866688833" observedRunningTime="2025-11-26 11:25:49.788383682 +0000 UTC m=+909.379595204" watchObservedRunningTime="2025-11-26 11:25:49.789364382 +0000 UTC m=+909.380575905"
Nov 26 11:25:49 crc kubenswrapper[4622]: I1126 11:25:49.802627 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Nov 26 11:25:49 crc kubenswrapper[4622]: I1126 11:25:49.806536 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-mj2jt" podStartSLOduration=8.266031397 podStartE2EDuration="13.806513327s" podCreationTimestamp="2025-11-26 11:25:36 +0000 UTC" firstStartedPulling="2025-11-26 11:25:40.620111259 +0000 UTC m=+900.211322772" lastFinishedPulling="2025-11-26 11:25:46.16059318 +0000 UTC m=+905.751804702" observedRunningTime="2025-11-26 11:25:49.803697544 +0000 UTC m=+909.394909066" watchObservedRunningTime="2025-11-26 11:25:49.806513327 +0000 UTC m=+909.397724850"
Nov 26 11:25:50 crc kubenswrapper[4622]: I1126 11:25:50.752617 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"395125e4-6b50-4032-811d-b474b647ed88","Type":"ContainerStarted","Data":"e51c840bb5969252319b25b00305e59a43f9e24c98dbbabe71da54628fadb014"}
Nov 26 11:25:50 crc kubenswrapper[4622]: I1126 11:25:50.754469 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"acbc91d6-2b39-4663-9501-ee36fda433ed","Type":"ContainerStarted","Data":"9992f68065799149308ac536e542c31db2a1b419c1bc1319fbefd8e1dadf33f0"}
Nov 26 11:25:50 crc kubenswrapper[4622]: I1126 11:25:50.775863 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=16.457663228 podStartE2EDuration="22.775844708s" podCreationTimestamp="2025-11-26 11:25:28 +0000 UTC" firstStartedPulling="2025-11-26 11:25:39.836683682 +0000 UTC m=+899.427895205" lastFinishedPulling="2025-11-26 11:25:46.154865162 +0000 UTC m=+905.746076685" observedRunningTime="2025-11-26 11:25:50.769703801 +0000 UTC m=+910.360915323" watchObservedRunningTime="2025-11-26 11:25:50.775844708 +0000 UTC m=+910.367056230"
Nov 26 11:25:50 crc kubenswrapper[4622]: I1126 11:25:50.786633 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=17.999043331 podStartE2EDuration="23.786616814s" podCreationTimestamp="2025-11-26 11:25:27 +0000 UTC" firstStartedPulling="2025-11-26 11:25:40.386379654 +0000 UTC m=+899.977591175" lastFinishedPulling="2025-11-26 11:25:46.173953136 +0000 UTC m=+905.765164658" observedRunningTime="2025-11-26 11:25:50.784523504 +0000 UTC m=+910.375735026" watchObservedRunningTime="2025-11-26 11:25:50.786616814 +0000 UTC m=+910.377828336"
Nov 26 11:25:51 crc kubenswrapper[4622]: I1126 11:25:51.802075 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.418214 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.418469 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.432926 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pz5p4"] Nov 26 11:25:52 crc kubenswrapper[4622]: E1126 11:25:52.433231 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3dc7c4a-baf9-4368-9c24-b9c4b82a102f" containerName="init" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.433249 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3dc7c4a-baf9-4368-9c24-b9c4b82a102f" containerName="init" Nov 26 11:25:52 crc kubenswrapper[4622]: E1126 11:25:52.433270 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33d73b3a-6a70-461f-8dba-d76ebcb8f315" containerName="init" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.433277 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="33d73b3a-6a70-461f-8dba-d76ebcb8f315" containerName="init" Nov 26 11:25:52 crc kubenswrapper[4622]: E1126 11:25:52.433296 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3dc7c4a-baf9-4368-9c24-b9c4b82a102f" containerName="dnsmasq-dns" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.433302 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3dc7c4a-baf9-4368-9c24-b9c4b82a102f" containerName="dnsmasq-dns" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.433448 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="33d73b3a-6a70-461f-8dba-d76ebcb8f315" containerName="init" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.433472 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3dc7c4a-baf9-4368-9c24-b9c4b82a102f" containerName="dnsmasq-dns" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.434462 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.451741 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pz5p4"] Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.452306 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.562176 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ctm4\" (UniqueName: \"kubernetes.io/projected/f17aff26-2624-4a58-aed0-4cd5d316e818-kube-api-access-5ctm4\") pod \"redhat-marketplace-pz5p4\" (UID: \"f17aff26-2624-4a58-aed0-4cd5d316e818\") " pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.562394 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f17aff26-2624-4a58-aed0-4cd5d316e818-catalog-content\") pod \"redhat-marketplace-pz5p4\" (UID: \"f17aff26-2624-4a58-aed0-4cd5d316e818\") " pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.562528 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f17aff26-2624-4a58-aed0-4cd5d316e818-utilities\") pod \"redhat-marketplace-pz5p4\" (UID: \"f17aff26-2624-4a58-aed0-4cd5d316e818\") " pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.664172 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f17aff26-2624-4a58-aed0-4cd5d316e818-utilities\") pod \"redhat-marketplace-pz5p4\" (UID: \"f17aff26-2624-4a58-aed0-4cd5d316e818\") " pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.664253 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ctm4\" (UniqueName: \"kubernetes.io/projected/f17aff26-2624-4a58-aed0-4cd5d316e818-kube-api-access-5ctm4\") pod \"redhat-marketplace-pz5p4\" (UID: \"f17aff26-2624-4a58-aed0-4cd5d316e818\") " pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.664372 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f17aff26-2624-4a58-aed0-4cd5d316e818-catalog-content\") pod \"redhat-marketplace-pz5p4\" (UID: \"f17aff26-2624-4a58-aed0-4cd5d316e818\") " pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.664813 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f17aff26-2624-4a58-aed0-4cd5d316e818-utilities\") pod \"redhat-marketplace-pz5p4\" (UID: \"f17aff26-2624-4a58-aed0-4cd5d316e818\") " pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.664906 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f17aff26-2624-4a58-aed0-4cd5d316e818-catalog-content\") pod \"redhat-marketplace-pz5p4\" (UID: \"f17aff26-2624-4a58-aed0-4cd5d316e818\") 
" pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.681943 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ctm4\" (UniqueName: \"kubernetes.io/projected/f17aff26-2624-4a58-aed0-4cd5d316e818-kube-api-access-5ctm4\") pod \"redhat-marketplace-pz5p4\" (UID: \"f17aff26-2624-4a58-aed0-4cd5d316e818\") " pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.754833 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.768374 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.828704 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Nov 26 11:25:52 crc kubenswrapper[4622]: I1126 11:25:52.848354 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.033016 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-ksljn"] Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.034289 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-ksljn" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.036427 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.042840 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ksljn"] Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.134319 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c65c5f57f-zg6fk"] Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.135557 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.141666 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.152430 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c65c5f57f-zg6fk"] Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.177953 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a1057f8-4662-461b-93ba-84576271087b-config\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.178009 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1057f8-4662-461b-93ba-84576271087b-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.178147 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a1057f8-4662-461b-93ba-84576271087b-combined-ca-bundle\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.178218 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cf5q8\" (UniqueName: \"kubernetes.io/projected/0a1057f8-4662-461b-93ba-84576271087b-kube-api-access-cf5q8\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.178292 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0a1057f8-4662-461b-93ba-84576271087b-ovn-rundir\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.178335 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0a1057f8-4662-461b-93ba-84576271087b-ovs-rundir\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.229954 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c65c5f57f-zg6fk"] Nov 26 11:25:53 crc kubenswrapper[4622]: E1126 11:25:53.230779 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-79tsl ovsdbserver-nb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk" podUID="2554c318-13d7-4605-a426-50d2d1e994e2" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.255328 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c476d78c5-44ndx"] Nov 26 11:25:53 crc 
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.258509 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c476d78c5-44ndx"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.263310 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.264197 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c476d78c5-44ndx"]
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.278313 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pz5p4"]
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.281429 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1057f8-4662-461b-93ba-84576271087b-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.281477 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-dns-svc\") pod \"dnsmasq-dns-6c65c5f57f-zg6fk\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.281518 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-ovsdbserver-nb\") pod \"dnsmasq-dns-6c65c5f57f-zg6fk\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.281611 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a1057f8-4662-461b-93ba-84576271087b-combined-ca-bundle\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.281651 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cf5q8\" (UniqueName: \"kubernetes.io/projected/0a1057f8-4662-461b-93ba-84576271087b-kube-api-access-cf5q8\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.281699 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79tsl\" (UniqueName: \"kubernetes.io/projected/2554c318-13d7-4605-a426-50d2d1e994e2-kube-api-access-79tsl\") pod \"dnsmasq-dns-6c65c5f57f-zg6fk\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.281741 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0a1057f8-4662-461b-93ba-84576271087b-ovn-rundir\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.281766 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-config\") pod \"dnsmasq-dns-6c65c5f57f-zg6fk\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.281796 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0a1057f8-4662-461b-93ba-84576271087b-ovs-rundir\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.281856 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a1057f8-4662-461b-93ba-84576271087b-config\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.284054 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a1057f8-4662-461b-93ba-84576271087b-config\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.284887 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0a1057f8-4662-461b-93ba-84576271087b-ovn-rundir\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.284948 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0a1057f8-4662-461b-93ba-84576271087b-ovs-rundir\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.290528 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1057f8-4662-461b-93ba-84576271087b-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.297868 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a1057f8-4662-461b-93ba-84576271087b-combined-ca-bundle\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.314153 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cf5q8\" (UniqueName: \"kubernetes.io/projected/0a1057f8-4662-461b-93ba-84576271087b-kube-api-access-cf5q8\") pod \"ovn-controller-metrics-ksljn\" (UID: \"0a1057f8-4662-461b-93ba-84576271087b\") " pod="openstack/ovn-controller-metrics-ksljn"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.358250 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.359726 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.360936 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-ksljn"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.364130 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.364303 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.364414 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-ttfvg"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.369361 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.375995 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.390702 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-dns-svc\") pod \"dnsmasq-dns-5c476d78c5-44ndx\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " pod="openstack/dnsmasq-dns-5c476d78c5-44ndx"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.390752 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-config\") pod \"dnsmasq-dns-5c476d78c5-44ndx\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " pod="openstack/dnsmasq-dns-5c476d78c5-44ndx"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.390824 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79tsl\" (UniqueName: \"kubernetes.io/projected/2554c318-13d7-4605-a426-50d2d1e994e2-kube-api-access-79tsl\") pod \"dnsmasq-dns-6c65c5f57f-zg6fk\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.390842 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-ovsdbserver-sb\") pod \"dnsmasq-dns-5c476d78c5-44ndx\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " pod="openstack/dnsmasq-dns-5c476d78c5-44ndx"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.390925 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-config\") pod \"dnsmasq-dns-6c65c5f57f-zg6fk\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.391107 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-dns-svc\") pod \"dnsmasq-dns-6c65c5f57f-zg6fk\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.391251 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-ovsdbserver-nb\") pod \"dnsmasq-dns-6c65c5f57f-zg6fk\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.391337 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcd5p\" (UniqueName: \"kubernetes.io/projected/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-kube-api-access-xcd5p\") pod \"dnsmasq-dns-5c476d78c5-44ndx\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " pod="openstack/dnsmasq-dns-5c476d78c5-44ndx"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.391414 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-ovsdbserver-nb\") pod \"dnsmasq-dns-5c476d78c5-44ndx\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " pod="openstack/dnsmasq-dns-5c476d78c5-44ndx"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.394124 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-config\") pod \"dnsmasq-dns-6c65c5f57f-zg6fk\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.395041 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-dns-svc\") pod \"dnsmasq-dns-6c65c5f57f-zg6fk\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.395185 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-ovsdbserver-nb\") pod \"dnsmasq-dns-6c65c5f57f-zg6fk\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.419294 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79tsl\" (UniqueName: \"kubernetes.io/projected/2554c318-13d7-4605-a426-50d2d1e994e2-kube-api-access-79tsl\") pod \"dnsmasq-dns-6c65c5f57f-zg6fk\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.493120 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6b4db61-f18b-40a7-a97d-17849df44c24-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0"
Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.493460 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d6b4db61-f18b-40a7-a97d-17849df44c24-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0"
\"kube-api-access-xcd5p\" (UniqueName: \"kubernetes.io/projected/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-kube-api-access-xcd5p\") pod \"dnsmasq-dns-5c476d78c5-44ndx\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.493642 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxzxh\" (UniqueName: \"kubernetes.io/projected/d6b4db61-f18b-40a7-a97d-17849df44c24-kube-api-access-zxzxh\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.493684 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-ovsdbserver-nb\") pod \"dnsmasq-dns-5c476d78c5-44ndx\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.493700 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6b4db61-f18b-40a7-a97d-17849df44c24-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.493735 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-dns-svc\") pod \"dnsmasq-dns-5c476d78c5-44ndx\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.493755 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6b4db61-f18b-40a7-a97d-17849df44c24-config\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.493783 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-config\") pod \"dnsmasq-dns-5c476d78c5-44ndx\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.493828 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-ovsdbserver-sb\") pod \"dnsmasq-dns-5c476d78c5-44ndx\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.493872 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d6b4db61-f18b-40a7-a97d-17849df44c24-scripts\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.493900 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/d6b4db61-f18b-40a7-a97d-17849df44c24-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.494816 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-config\") pod \"dnsmasq-dns-5c476d78c5-44ndx\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.494942 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-dns-svc\") pod \"dnsmasq-dns-5c476d78c5-44ndx\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.495090 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-ovsdbserver-sb\") pod \"dnsmasq-dns-5c476d78c5-44ndx\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.495583 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-ovsdbserver-nb\") pod \"dnsmasq-dns-5c476d78c5-44ndx\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.509256 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcd5p\" (UniqueName: \"kubernetes.io/projected/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-kube-api-access-xcd5p\") pod \"dnsmasq-dns-5c476d78c5-44ndx\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.573449 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.597230 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxzxh\" (UniqueName: \"kubernetes.io/projected/d6b4db61-f18b-40a7-a97d-17849df44c24-kube-api-access-zxzxh\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.597727 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6b4db61-f18b-40a7-a97d-17849df44c24-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.597789 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6b4db61-f18b-40a7-a97d-17849df44c24-config\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.597869 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d6b4db61-f18b-40a7-a97d-17849df44c24-scripts\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.597900 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6b4db61-f18b-40a7-a97d-17849df44c24-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.597996 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6b4db61-f18b-40a7-a97d-17849df44c24-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.598045 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d6b4db61-f18b-40a7-a97d-17849df44c24-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.598929 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6b4db61-f18b-40a7-a97d-17849df44c24-config\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.599394 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d6b4db61-f18b-40a7-a97d-17849df44c24-scripts\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.599394 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d6b4db61-f18b-40a7-a97d-17849df44c24-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " 
pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.601645 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6b4db61-f18b-40a7-a97d-17849df44c24-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.602105 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6b4db61-f18b-40a7-a97d-17849df44c24-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.603878 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6b4db61-f18b-40a7-a97d-17849df44c24-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.612193 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxzxh\" (UniqueName: \"kubernetes.io/projected/d6b4db61-f18b-40a7-a97d-17849df44c24-kube-api-access-zxzxh\") pod \"ovn-northd-0\" (UID: \"d6b4db61-f18b-40a7-a97d-17849df44c24\") " pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.697164 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.773855 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ksljn"] Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.795219 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pz5p4" event={"ID":"f17aff26-2624-4a58-aed0-4cd5d316e818","Type":"ContainerStarted","Data":"4639ff5a97bfa3658978d3a9c2376b5e88393c31e1c121eb88c9b47edfea38d8"} Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.795453 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.835362 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.903954 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-config\") pod \"2554c318-13d7-4605-a426-50d2d1e994e2\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.904006 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-dns-svc\") pod \"2554c318-13d7-4605-a426-50d2d1e994e2\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.904038 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-ovsdbserver-nb\") pod \"2554c318-13d7-4605-a426-50d2d1e994e2\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.904231 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79tsl\" (UniqueName: \"kubernetes.io/projected/2554c318-13d7-4605-a426-50d2d1e994e2-kube-api-access-79tsl\") pod \"2554c318-13d7-4605-a426-50d2d1e994e2\" (UID: \"2554c318-13d7-4605-a426-50d2d1e994e2\") " Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.904479 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2554c318-13d7-4605-a426-50d2d1e994e2" (UID: "2554c318-13d7-4605-a426-50d2d1e994e2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.904540 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2554c318-13d7-4605-a426-50d2d1e994e2" (UID: "2554c318-13d7-4605-a426-50d2d1e994e2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.905029 4622 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.905045 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.905914 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-config" (OuterVolumeSpecName: "config") pod "2554c318-13d7-4605-a426-50d2d1e994e2" (UID: "2554c318-13d7-4605-a426-50d2d1e994e2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.911407 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2554c318-13d7-4605-a426-50d2d1e994e2-kube-api-access-79tsl" (OuterVolumeSpecName: "kube-api-access-79tsl") pod "2554c318-13d7-4605-a426-50d2d1e994e2" (UID: "2554c318-13d7-4605-a426-50d2d1e994e2"). InnerVolumeSpecName "kube-api-access-79tsl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:25:53 crc kubenswrapper[4622]: I1126 11:25:53.966022 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c476d78c5-44ndx"] Nov 26 11:25:53 crc kubenswrapper[4622]: W1126 11:25:53.968974 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b9f8eb9_3de8_420c_95a7_af73fc7b33b4.slice/crio-d52db22aeabd63db6b2848bb8182dcea5b14a5187f83467672d30309bc1b8797 WatchSource:0}: Error finding container d52db22aeabd63db6b2848bb8182dcea5b14a5187f83467672d30309bc1b8797: Status 404 returned error can't find the container with id d52db22aeabd63db6b2848bb8182dcea5b14a5187f83467672d30309bc1b8797 Nov 26 11:25:54 crc kubenswrapper[4622]: I1126 11:25:54.006813 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79tsl\" (UniqueName: \"kubernetes.io/projected/2554c318-13d7-4605-a426-50d2d1e994e2-kube-api-access-79tsl\") on node \"crc\" DevicePath \"\"" Nov 26 11:25:54 crc kubenswrapper[4622]: I1126 11:25:54.006850 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2554c318-13d7-4605-a426-50d2d1e994e2-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:25:54 crc kubenswrapper[4622]: I1126 11:25:54.116760 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 26 11:25:54 crc kubenswrapper[4622]: I1126 11:25:54.808060 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" event={"ID":"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4","Type":"ContainerStarted","Data":"d52db22aeabd63db6b2848bb8182dcea5b14a5187f83467672d30309bc1b8797"} Nov 26 11:25:54 crc kubenswrapper[4622]: I1126 11:25:54.809444 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"d6b4db61-f18b-40a7-a97d-17849df44c24","Type":"ContainerStarted","Data":"b4859439307a602881bada06147249e0a52fc51719b63af7658dc743b1379f8d"} Nov 26 11:25:54 crc kubenswrapper[4622]: I1126 11:25:54.810972 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c65c5f57f-zg6fk" Nov 26 11:25:54 crc kubenswrapper[4622]: I1126 11:25:54.810976 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ksljn" event={"ID":"0a1057f8-4662-461b-93ba-84576271087b","Type":"ContainerStarted","Data":"de02ffbc964f948d4f46e70c267c74af71bf42b38641b20f158e7ec623640b7c"} Nov 26 11:25:54 crc kubenswrapper[4622]: I1126 11:25:54.849182 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c65c5f57f-zg6fk"] Nov 26 11:25:54 crc kubenswrapper[4622]: I1126 11:25:54.856394 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c65c5f57f-zg6fk"] Nov 26 11:25:55 crc kubenswrapper[4622]: I1126 11:25:55.712610 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Nov 26 11:25:56 crc kubenswrapper[4622]: I1126 11:25:56.195247 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:56 crc kubenswrapper[4622]: I1126 11:25:56.195654 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:56 crc kubenswrapper[4622]: I1126 11:25:56.228941 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:56 crc kubenswrapper[4622]: I1126 11:25:56.718517 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2554c318-13d7-4605-a426-50d2d1e994e2" path="/var/lib/kubelet/pods/2554c318-13d7-4605-a426-50d2d1e994e2/volumes" Nov 26 11:25:56 crc kubenswrapper[4622]: I1126 11:25:56.854861 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:56 crc kubenswrapper[4622]: I1126 11:25:56.898111 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sfxnc"] Nov 26 11:25:57 crc kubenswrapper[4622]: I1126 11:25:57.828937 4622 generic.go:334] "Generic (PLEG): container finished" podID="f17aff26-2624-4a58-aed0-4cd5d316e818" containerID="9f573418f31f6581a1b8be621caf2f623991ffd9dfdc88c5021946dd4163ad7f" exitCode=0 Nov 26 11:25:57 crc kubenswrapper[4622]: I1126 11:25:57.828997 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pz5p4" event={"ID":"f17aff26-2624-4a58-aed0-4cd5d316e818","Type":"ContainerDied","Data":"9f573418f31f6581a1b8be621caf2f623991ffd9dfdc88c5021946dd4163ad7f"} Nov 26 11:25:57 crc kubenswrapper[4622]: I1126 11:25:57.830528 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ksljn" event={"ID":"0a1057f8-4662-461b-93ba-84576271087b","Type":"ContainerStarted","Data":"7534c3682d53ecaf985d3aa270f8de210feee277577fdd0e9bbd75deb3a8ef24"} Nov 26 11:25:57 crc kubenswrapper[4622]: I1126 11:25:57.832392 4622 generic.go:334] "Generic (PLEG): container finished" podID="5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" containerID="9fbf402d2fc3e031a7ccc56c7097aa3524374c4b432d7196cc0fe28600c6638c" exitCode=0 Nov 26 11:25:57 crc kubenswrapper[4622]: I1126 11:25:57.833346 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" event={"ID":"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4","Type":"ContainerDied","Data":"9fbf402d2fc3e031a7ccc56c7097aa3524374c4b432d7196cc0fe28600c6638c"} Nov 26 11:25:57 crc 
kubenswrapper[4622]: I1126 11:25:57.863867 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-ksljn" podStartSLOduration=4.863846859 podStartE2EDuration="4.863846859s" podCreationTimestamp="2025-11-26 11:25:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:25:57.856434613 +0000 UTC m=+917.447646135" watchObservedRunningTime="2025-11-26 11:25:57.863846859 +0000 UTC m=+917.455058380" Nov 26 11:25:58 crc kubenswrapper[4622]: I1126 11:25:58.840158 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" event={"ID":"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4","Type":"ContainerStarted","Data":"5d6e364f5d8aac961d5db3f6ff3e6343cc678fab8a13e530a04e91ea70b3d6e4"} Nov 26 11:25:58 crc kubenswrapper[4622]: I1126 11:25:58.840647 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" Nov 26 11:25:58 crc kubenswrapper[4622]: I1126 11:25:58.842810 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"d6b4db61-f18b-40a7-a97d-17849df44c24","Type":"ContainerStarted","Data":"f680cfc96dd75cde5e23f80576b90838216f27b9759b260b0d6b02d3af697880"} Nov 26 11:25:58 crc kubenswrapper[4622]: I1126 11:25:58.842839 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"d6b4db61-f18b-40a7-a97d-17849df44c24","Type":"ContainerStarted","Data":"e0deab67fff9f99ab0984c16797206dc25359a469523feadaf85421c885dd820"} Nov 26 11:25:58 crc kubenswrapper[4622]: I1126 11:25:58.842972 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Nov 26 11:25:58 crc kubenswrapper[4622]: I1126 11:25:58.842972 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-sfxnc" podUID="63704728-b90b-436c-9d93-41bc13f47765" containerName="registry-server" containerID="cri-o://79faf6d251a551a0d0430b946e8a2c283620728b684a0d518543c06211fdc921" gracePeriod=2 Nov 26 11:25:58 crc kubenswrapper[4622]: I1126 11:25:58.854416 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" podStartSLOduration=5.854392816 podStartE2EDuration="5.854392816s" podCreationTimestamp="2025-11-26 11:25:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:25:58.853523635 +0000 UTC m=+918.444735158" watchObservedRunningTime="2025-11-26 11:25:58.854392816 +0000 UTC m=+918.445604338" Nov 26 11:25:58 crc kubenswrapper[4622]: I1126 11:25:58.878935 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.9795607450000001 podStartE2EDuration="5.878919082s" podCreationTimestamp="2025-11-26 11:25:53 +0000 UTC" firstStartedPulling="2025-11-26 11:25:54.127307402 +0000 UTC m=+913.718518914" lastFinishedPulling="2025-11-26 11:25:58.026665729 +0000 UTC m=+917.617877251" observedRunningTime="2025-11-26 11:25:58.873791017 +0000 UTC m=+918.465002559" watchObservedRunningTime="2025-11-26 11:25:58.878919082 +0000 UTC m=+918.470130604" Nov 26 11:25:58 crc kubenswrapper[4622]: I1126 11:25:58.930279 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Nov 26 11:25:58 crc 
kubenswrapper[4622]: I1126 11:25:58.930316 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Nov 26 11:25:58 crc kubenswrapper[4622]: I1126 11:25:58.991981 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.230890 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.312599 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9t8bq\" (UniqueName: \"kubernetes.io/projected/63704728-b90b-436c-9d93-41bc13f47765-kube-api-access-9t8bq\") pod \"63704728-b90b-436c-9d93-41bc13f47765\" (UID: \"63704728-b90b-436c-9d93-41bc13f47765\") " Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.312669 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63704728-b90b-436c-9d93-41bc13f47765-catalog-content\") pod \"63704728-b90b-436c-9d93-41bc13f47765\" (UID: \"63704728-b90b-436c-9d93-41bc13f47765\") " Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.312705 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63704728-b90b-436c-9d93-41bc13f47765-utilities\") pod \"63704728-b90b-436c-9d93-41bc13f47765\" (UID: \"63704728-b90b-436c-9d93-41bc13f47765\") " Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.314068 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63704728-b90b-436c-9d93-41bc13f47765-utilities" (OuterVolumeSpecName: "utilities") pod "63704728-b90b-436c-9d93-41bc13f47765" (UID: "63704728-b90b-436c-9d93-41bc13f47765"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.318130 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63704728-b90b-436c-9d93-41bc13f47765-kube-api-access-9t8bq" (OuterVolumeSpecName: "kube-api-access-9t8bq") pod "63704728-b90b-436c-9d93-41bc13f47765" (UID: "63704728-b90b-436c-9d93-41bc13f47765"). InnerVolumeSpecName "kube-api-access-9t8bq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.351274 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63704728-b90b-436c-9d93-41bc13f47765-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "63704728-b90b-436c-9d93-41bc13f47765" (UID: "63704728-b90b-436c-9d93-41bc13f47765"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.415425 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9t8bq\" (UniqueName: \"kubernetes.io/projected/63704728-b90b-436c-9d93-41bc13f47765-kube-api-access-9t8bq\") on node \"crc\" DevicePath \"\"" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.415463 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63704728-b90b-436c-9d93-41bc13f47765-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.415474 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63704728-b90b-436c-9d93-41bc13f47765-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.851026 4622 generic.go:334] "Generic (PLEG): container finished" podID="f17aff26-2624-4a58-aed0-4cd5d316e818" containerID="e93c74c3bd109a5b4a5e671528e92ebdafc0e032146d3eeca363de237457f3b1" exitCode=0 Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.851139 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pz5p4" event={"ID":"f17aff26-2624-4a58-aed0-4cd5d316e818","Type":"ContainerDied","Data":"e93c74c3bd109a5b4a5e671528e92ebdafc0e032146d3eeca363de237457f3b1"} Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.854491 4622 generic.go:334] "Generic (PLEG): container finished" podID="63704728-b90b-436c-9d93-41bc13f47765" containerID="79faf6d251a551a0d0430b946e8a2c283620728b684a0d518543c06211fdc921" exitCode=0 Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.854549 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sfxnc" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.854579 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sfxnc" event={"ID":"63704728-b90b-436c-9d93-41bc13f47765","Type":"ContainerDied","Data":"79faf6d251a551a0d0430b946e8a2c283620728b684a0d518543c06211fdc921"} Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.854621 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sfxnc" event={"ID":"63704728-b90b-436c-9d93-41bc13f47765","Type":"ContainerDied","Data":"560c78cfe007e8e0c2fc818a3cb913560cc7b04be4434575649e9bea02c5febd"} Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.854640 4622 scope.go:117] "RemoveContainer" containerID="79faf6d251a551a0d0430b946e8a2c283620728b684a0d518543c06211fdc921" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.872380 4622 scope.go:117] "RemoveContainer" containerID="7ab4dae7cbc30856f178f131fee80ab8de8530484382299fe0c9be13a2f0a5e3" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.885026 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sfxnc"] Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.891939 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-sfxnc"] Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.906719 4622 scope.go:117] "RemoveContainer" containerID="d1d3cdd015d55def7f60721da6e0a63f21c114987ff672d025d2306703e6400d" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.922714 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.923449 4622 scope.go:117] "RemoveContainer" containerID="79faf6d251a551a0d0430b946e8a2c283620728b684a0d518543c06211fdc921" Nov 26 11:25:59 crc kubenswrapper[4622]: E1126 11:25:59.923935 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79faf6d251a551a0d0430b946e8a2c283620728b684a0d518543c06211fdc921\": container with ID starting with 79faf6d251a551a0d0430b946e8a2c283620728b684a0d518543c06211fdc921 not found: ID does not exist" containerID="79faf6d251a551a0d0430b946e8a2c283620728b684a0d518543c06211fdc921" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.923967 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79faf6d251a551a0d0430b946e8a2c283620728b684a0d518543c06211fdc921"} err="failed to get container status \"79faf6d251a551a0d0430b946e8a2c283620728b684a0d518543c06211fdc921\": rpc error: code = NotFound desc = could not find container \"79faf6d251a551a0d0430b946e8a2c283620728b684a0d518543c06211fdc921\": container with ID starting with 79faf6d251a551a0d0430b946e8a2c283620728b684a0d518543c06211fdc921 not found: ID does not exist" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.923992 4622 scope.go:117] "RemoveContainer" containerID="7ab4dae7cbc30856f178f131fee80ab8de8530484382299fe0c9be13a2f0a5e3" Nov 26 11:25:59 crc kubenswrapper[4622]: E1126 11:25:59.925050 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ab4dae7cbc30856f178f131fee80ab8de8530484382299fe0c9be13a2f0a5e3\": container with ID starting with 7ab4dae7cbc30856f178f131fee80ab8de8530484382299fe0c9be13a2f0a5e3 not 
found: ID does not exist" containerID="7ab4dae7cbc30856f178f131fee80ab8de8530484382299fe0c9be13a2f0a5e3" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.925120 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ab4dae7cbc30856f178f131fee80ab8de8530484382299fe0c9be13a2f0a5e3"} err="failed to get container status \"7ab4dae7cbc30856f178f131fee80ab8de8530484382299fe0c9be13a2f0a5e3\": rpc error: code = NotFound desc = could not find container \"7ab4dae7cbc30856f178f131fee80ab8de8530484382299fe0c9be13a2f0a5e3\": container with ID starting with 7ab4dae7cbc30856f178f131fee80ab8de8530484382299fe0c9be13a2f0a5e3 not found: ID does not exist" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.925160 4622 scope.go:117] "RemoveContainer" containerID="d1d3cdd015d55def7f60721da6e0a63f21c114987ff672d025d2306703e6400d" Nov 26 11:25:59 crc kubenswrapper[4622]: E1126 11:25:59.925549 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1d3cdd015d55def7f60721da6e0a63f21c114987ff672d025d2306703e6400d\": container with ID starting with d1d3cdd015d55def7f60721da6e0a63f21c114987ff672d025d2306703e6400d not found: ID does not exist" containerID="d1d3cdd015d55def7f60721da6e0a63f21c114987ff672d025d2306703e6400d" Nov 26 11:25:59 crc kubenswrapper[4622]: I1126 11:25:59.925581 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1d3cdd015d55def7f60721da6e0a63f21c114987ff672d025d2306703e6400d"} err="failed to get container status \"d1d3cdd015d55def7f60721da6e0a63f21c114987ff672d025d2306703e6400d\": rpc error: code = NotFound desc = could not find container \"d1d3cdd015d55def7f60721da6e0a63f21c114987ff672d025d2306703e6400d\": container with ID starting with d1d3cdd015d55def7f60721da6e0a63f21c114987ff672d025d2306703e6400d not found: ID does not exist" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.324315 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.325136 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.373382 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-9cd0-account-create-update-2jfbk"] Nov 26 11:26:00 crc kubenswrapper[4622]: E1126 11:26:00.373763 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63704728-b90b-436c-9d93-41bc13f47765" containerName="extract-content" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.373782 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="63704728-b90b-436c-9d93-41bc13f47765" containerName="extract-content" Nov 26 11:26:00 crc kubenswrapper[4622]: E1126 11:26:00.373800 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63704728-b90b-436c-9d93-41bc13f47765" containerName="registry-server" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.373807 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="63704728-b90b-436c-9d93-41bc13f47765" containerName="registry-server" Nov 26 11:26:00 crc kubenswrapper[4622]: E1126 11:26:00.373819 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63704728-b90b-436c-9d93-41bc13f47765" containerName="extract-utilities" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.373825 4622 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="63704728-b90b-436c-9d93-41bc13f47765" containerName="extract-utilities" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.374017 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="63704728-b90b-436c-9d93-41bc13f47765" containerName="registry-server" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.374626 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9cd0-account-create-update-2jfbk" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.378551 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.392712 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9cd0-account-create-update-2jfbk"] Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.399022 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-6hk2z"] Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.400362 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-6hk2z" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.406036 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-6hk2z"] Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.425213 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.537141 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63a79d2a-f82a-49df-b959-e3c0b61cd34a-operator-scripts\") pod \"keystone-db-create-6hk2z\" (UID: \"63a79d2a-f82a-49df-b959-e3c0b61cd34a\") " pod="openstack/keystone-db-create-6hk2z" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.537212 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67d9311d-d647-40f4-b291-c3540b41f78c-operator-scripts\") pod \"keystone-9cd0-account-create-update-2jfbk\" (UID: \"67d9311d-d647-40f4-b291-c3540b41f78c\") " pod="openstack/keystone-9cd0-account-create-update-2jfbk" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.537356 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25p89\" (UniqueName: \"kubernetes.io/projected/67d9311d-d647-40f4-b291-c3540b41f78c-kube-api-access-25p89\") pod \"keystone-9cd0-account-create-update-2jfbk\" (UID: \"67d9311d-d647-40f4-b291-c3540b41f78c\") " pod="openstack/keystone-9cd0-account-create-update-2jfbk" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.537399 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7s8g\" (UniqueName: \"kubernetes.io/projected/63a79d2a-f82a-49df-b959-e3c0b61cd34a-kube-api-access-w7s8g\") pod \"keystone-db-create-6hk2z\" (UID: \"63a79d2a-f82a-49df-b959-e3c0b61cd34a\") " pod="openstack/keystone-db-create-6hk2z" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.639073 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67d9311d-d647-40f4-b291-c3540b41f78c-operator-scripts\") pod \"keystone-9cd0-account-create-update-2jfbk\" (UID: 
\"67d9311d-d647-40f4-b291-c3540b41f78c\") " pod="openstack/keystone-9cd0-account-create-update-2jfbk" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.639802 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67d9311d-d647-40f4-b291-c3540b41f78c-operator-scripts\") pod \"keystone-9cd0-account-create-update-2jfbk\" (UID: \"67d9311d-d647-40f4-b291-c3540b41f78c\") " pod="openstack/keystone-9cd0-account-create-update-2jfbk" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.639942 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25p89\" (UniqueName: \"kubernetes.io/projected/67d9311d-d647-40f4-b291-c3540b41f78c-kube-api-access-25p89\") pod \"keystone-9cd0-account-create-update-2jfbk\" (UID: \"67d9311d-d647-40f4-b291-c3540b41f78c\") " pod="openstack/keystone-9cd0-account-create-update-2jfbk" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.639989 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7s8g\" (UniqueName: \"kubernetes.io/projected/63a79d2a-f82a-49df-b959-e3c0b61cd34a-kube-api-access-w7s8g\") pod \"keystone-db-create-6hk2z\" (UID: \"63a79d2a-f82a-49df-b959-e3c0b61cd34a\") " pod="openstack/keystone-db-create-6hk2z" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.640383 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63a79d2a-f82a-49df-b959-e3c0b61cd34a-operator-scripts\") pod \"keystone-db-create-6hk2z\" (UID: \"63a79d2a-f82a-49df-b959-e3c0b61cd34a\") " pod="openstack/keystone-db-create-6hk2z" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.641002 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63a79d2a-f82a-49df-b959-e3c0b61cd34a-operator-scripts\") pod \"keystone-db-create-6hk2z\" (UID: \"63a79d2a-f82a-49df-b959-e3c0b61cd34a\") " pod="openstack/keystone-db-create-6hk2z" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.659210 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25p89\" (UniqueName: \"kubernetes.io/projected/67d9311d-d647-40f4-b291-c3540b41f78c-kube-api-access-25p89\") pod \"keystone-9cd0-account-create-update-2jfbk\" (UID: \"67d9311d-d647-40f4-b291-c3540b41f78c\") " pod="openstack/keystone-9cd0-account-create-update-2jfbk" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.667990 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7s8g\" (UniqueName: \"kubernetes.io/projected/63a79d2a-f82a-49df-b959-e3c0b61cd34a-kube-api-access-w7s8g\") pod \"keystone-db-create-6hk2z\" (UID: \"63a79d2a-f82a-49df-b959-e3c0b61cd34a\") " pod="openstack/keystone-db-create-6hk2z" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.683865 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6540-account-create-update-dgl4g"] Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.684974 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6540-account-create-update-dgl4g" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.686862 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.694534 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-qz4zz"] Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.699512 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-qz4zz" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.705532 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6540-account-create-update-dgl4g"] Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.720845 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63704728-b90b-436c-9d93-41bc13f47765" path="/var/lib/kubelet/pods/63704728-b90b-436c-9d93-41bc13f47765/volumes" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.722484 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-qz4zz"] Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.737142 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9cd0-account-create-update-2jfbk" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.764736 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-6hk2z" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.844144 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kc2x7\" (UniqueName: \"kubernetes.io/projected/240474a3-cc8a-4bfd-991d-1e67a6286df3-kube-api-access-kc2x7\") pod \"placement-6540-account-create-update-dgl4g\" (UID: \"240474a3-cc8a-4bfd-991d-1e67a6286df3\") " pod="openstack/placement-6540-account-create-update-dgl4g" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.844330 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llp8q\" (UniqueName: \"kubernetes.io/projected/b095b953-f02f-4a15-b3e0-e698a15b848f-kube-api-access-llp8q\") pod \"placement-db-create-qz4zz\" (UID: \"b095b953-f02f-4a15-b3e0-e698a15b848f\") " pod="openstack/placement-db-create-qz4zz" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.844619 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/240474a3-cc8a-4bfd-991d-1e67a6286df3-operator-scripts\") pod \"placement-6540-account-create-update-dgl4g\" (UID: \"240474a3-cc8a-4bfd-991d-1e67a6286df3\") " pod="openstack/placement-6540-account-create-update-dgl4g" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.844706 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b095b953-f02f-4a15-b3e0-e698a15b848f-operator-scripts\") pod \"placement-db-create-qz4zz\" (UID: \"b095b953-f02f-4a15-b3e0-e698a15b848f\") " pod="openstack/placement-db-create-qz4zz" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.874636 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pz5p4" 
event={"ID":"f17aff26-2624-4a58-aed0-4cd5d316e818","Type":"ContainerStarted","Data":"1ddfdf391c22a97742a433ce5d86f4ddfe45d9c29fa2add31aff8b6a423ca61a"} Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.892973 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pz5p4" podStartSLOduration=6.567100837 podStartE2EDuration="8.892954756s" podCreationTimestamp="2025-11-26 11:25:52 +0000 UTC" firstStartedPulling="2025-11-26 11:25:58.009208991 +0000 UTC m=+917.600420504" lastFinishedPulling="2025-11-26 11:26:00.335062901 +0000 UTC m=+919.926274423" observedRunningTime="2025-11-26 11:26:00.88636906 +0000 UTC m=+920.477580582" watchObservedRunningTime="2025-11-26 11:26:00.892954756 +0000 UTC m=+920.484166278" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.943167 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.946401 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/240474a3-cc8a-4bfd-991d-1e67a6286df3-operator-scripts\") pod \"placement-6540-account-create-update-dgl4g\" (UID: \"240474a3-cc8a-4bfd-991d-1e67a6286df3\") " pod="openstack/placement-6540-account-create-update-dgl4g" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.946476 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b095b953-f02f-4a15-b3e0-e698a15b848f-operator-scripts\") pod \"placement-db-create-qz4zz\" (UID: \"b095b953-f02f-4a15-b3e0-e698a15b848f\") " pod="openstack/placement-db-create-qz4zz" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.946554 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kc2x7\" (UniqueName: \"kubernetes.io/projected/240474a3-cc8a-4bfd-991d-1e67a6286df3-kube-api-access-kc2x7\") pod \"placement-6540-account-create-update-dgl4g\" (UID: \"240474a3-cc8a-4bfd-991d-1e67a6286df3\") " pod="openstack/placement-6540-account-create-update-dgl4g" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.946681 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llp8q\" (UniqueName: \"kubernetes.io/projected/b095b953-f02f-4a15-b3e0-e698a15b848f-kube-api-access-llp8q\") pod \"placement-db-create-qz4zz\" (UID: \"b095b953-f02f-4a15-b3e0-e698a15b848f\") " pod="openstack/placement-db-create-qz4zz" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.947745 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/240474a3-cc8a-4bfd-991d-1e67a6286df3-operator-scripts\") pod \"placement-6540-account-create-update-dgl4g\" (UID: \"240474a3-cc8a-4bfd-991d-1e67a6286df3\") " pod="openstack/placement-6540-account-create-update-dgl4g" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.947874 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b095b953-f02f-4a15-b3e0-e698a15b848f-operator-scripts\") pod \"placement-db-create-qz4zz\" (UID: \"b095b953-f02f-4a15-b3e0-e698a15b848f\") " pod="openstack/placement-db-create-qz4zz" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.969476 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llp8q\" 
(UniqueName: \"kubernetes.io/projected/b095b953-f02f-4a15-b3e0-e698a15b848f-kube-api-access-llp8q\") pod \"placement-db-create-qz4zz\" (UID: \"b095b953-f02f-4a15-b3e0-e698a15b848f\") " pod="openstack/placement-db-create-qz4zz" Nov 26 11:26:00 crc kubenswrapper[4622]: I1126 11:26:00.970070 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kc2x7\" (UniqueName: \"kubernetes.io/projected/240474a3-cc8a-4bfd-991d-1e67a6286df3-kube-api-access-kc2x7\") pod \"placement-6540-account-create-update-dgl4g\" (UID: \"240474a3-cc8a-4bfd-991d-1e67a6286df3\") " pod="openstack/placement-6540-account-create-update-dgl4g" Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.024931 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6540-account-create-update-dgl4g" Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.028341 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-qz4zz" Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.134861 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9cd0-account-create-update-2jfbk"] Nov 26 11:26:01 crc kubenswrapper[4622]: W1126 11:26:01.139524 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67d9311d_d647_40f4_b291_c3540b41f78c.slice/crio-9b85b99d8569fd22c58fb040e62a6ab171890a6c9df08be06512c707f9f1f234 WatchSource:0}: Error finding container 9b85b99d8569fd22c58fb040e62a6ab171890a6c9df08be06512c707f9f1f234: Status 404 returned error can't find the container with id 9b85b99d8569fd22c58fb040e62a6ab171890a6c9df08be06512c707f9f1f234 Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.223830 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-6hk2z"] Nov 26 11:26:01 crc kubenswrapper[4622]: W1126 11:26:01.225250 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63a79d2a_f82a_49df_b959_e3c0b61cd34a.slice/crio-9ab3837e9b908816125ae485052ae33ff1011a6d7b753e3d1fb46cb76ed9928e WatchSource:0}: Error finding container 9ab3837e9b908816125ae485052ae33ff1011a6d7b753e3d1fb46cb76ed9928e: Status 404 returned error can't find the container with id 9ab3837e9b908816125ae485052ae33ff1011a6d7b753e3d1fb46cb76ed9928e Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.456861 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-qz4zz"] Nov 26 11:26:01 crc kubenswrapper[4622]: W1126 11:26:01.457104 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb095b953_f02f_4a15_b3e0_e698a15b848f.slice/crio-4d57e602c5dd578504e9411c4fc7ebbff185cfb2191b6ffcb5d0d0cdf3ea14b4 WatchSource:0}: Error finding container 4d57e602c5dd578504e9411c4fc7ebbff185cfb2191b6ffcb5d0d0cdf3ea14b4: Status 404 returned error can't find the container with id 4d57e602c5dd578504e9411c4fc7ebbff185cfb2191b6ffcb5d0d0cdf3ea14b4 Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.567259 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6540-account-create-update-dgl4g"] Nov 26 11:26:01 crc kubenswrapper[4622]: W1126 11:26:01.623569 4622 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod240474a3_cc8a_4bfd_991d_1e67a6286df3.slice/crio-77a548a61ed332475db18f1765006a91cc8a36d04cca9de3a326edbf13b9e411 WatchSource:0}: Error finding container 77a548a61ed332475db18f1765006a91cc8a36d04cca9de3a326edbf13b9e411: Status 404 returned error can't find the container with id 77a548a61ed332475db18f1765006a91cc8a36d04cca9de3a326edbf13b9e411 Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.883533 4622 generic.go:334] "Generic (PLEG): container finished" podID="240474a3-cc8a-4bfd-991d-1e67a6286df3" containerID="4a470a60b171d72d93568e421998add90ec1f5d61d1333577efdae7aabc3d365" exitCode=0 Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.883630 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6540-account-create-update-dgl4g" event={"ID":"240474a3-cc8a-4bfd-991d-1e67a6286df3","Type":"ContainerDied","Data":"4a470a60b171d72d93568e421998add90ec1f5d61d1333577efdae7aabc3d365"} Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.883678 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6540-account-create-update-dgl4g" event={"ID":"240474a3-cc8a-4bfd-991d-1e67a6286df3","Type":"ContainerStarted","Data":"77a548a61ed332475db18f1765006a91cc8a36d04cca9de3a326edbf13b9e411"} Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.885487 4622 generic.go:334] "Generic (PLEG): container finished" podID="b095b953-f02f-4a15-b3e0-e698a15b848f" containerID="7f420817e41c1a228b0aeb8ce650ca646742a46240c9bec898326fbc92a98d49" exitCode=0 Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.885559 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-qz4zz" event={"ID":"b095b953-f02f-4a15-b3e0-e698a15b848f","Type":"ContainerDied","Data":"7f420817e41c1a228b0aeb8ce650ca646742a46240c9bec898326fbc92a98d49"} Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.885577 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-qz4zz" event={"ID":"b095b953-f02f-4a15-b3e0-e698a15b848f","Type":"ContainerStarted","Data":"4d57e602c5dd578504e9411c4fc7ebbff185cfb2191b6ffcb5d0d0cdf3ea14b4"} Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.887263 4622 generic.go:334] "Generic (PLEG): container finished" podID="67d9311d-d647-40f4-b291-c3540b41f78c" containerID="a7802c79a3f358342d9f07e9294e0f73b3b43d6578ca27404dd2d3c1e30be7c3" exitCode=0 Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.887318 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9cd0-account-create-update-2jfbk" event={"ID":"67d9311d-d647-40f4-b291-c3540b41f78c","Type":"ContainerDied","Data":"a7802c79a3f358342d9f07e9294e0f73b3b43d6578ca27404dd2d3c1e30be7c3"} Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.887335 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9cd0-account-create-update-2jfbk" event={"ID":"67d9311d-d647-40f4-b291-c3540b41f78c","Type":"ContainerStarted","Data":"9b85b99d8569fd22c58fb040e62a6ab171890a6c9df08be06512c707f9f1f234"} Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.889073 4622 generic.go:334] "Generic (PLEG): container finished" podID="63a79d2a-f82a-49df-b959-e3c0b61cd34a" containerID="395b2507a1545ee5fbec9681ab068fefbfcef724bd285b06e54a2480accf6f6e" exitCode=0 Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.889156 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-6hk2z" 
event={"ID":"63a79d2a-f82a-49df-b959-e3c0b61cd34a","Type":"ContainerDied","Data":"395b2507a1545ee5fbec9681ab068fefbfcef724bd285b06e54a2480accf6f6e"} Nov 26 11:26:01 crc kubenswrapper[4622]: I1126 11:26:01.889213 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-6hk2z" event={"ID":"63a79d2a-f82a-49df-b959-e3c0b61cd34a","Type":"ContainerStarted","Data":"9ab3837e9b908816125ae485052ae33ff1011a6d7b753e3d1fb46cb76ed9928e"} Nov 26 11:26:02 crc kubenswrapper[4622]: I1126 11:26:02.755420 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:26:02 crc kubenswrapper[4622]: I1126 11:26:02.755752 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:26:02 crc kubenswrapper[4622]: I1126 11:26:02.790335 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.233477 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6540-account-create-update-dgl4g" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.296721 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kc2x7\" (UniqueName: \"kubernetes.io/projected/240474a3-cc8a-4bfd-991d-1e67a6286df3-kube-api-access-kc2x7\") pod \"240474a3-cc8a-4bfd-991d-1e67a6286df3\" (UID: \"240474a3-cc8a-4bfd-991d-1e67a6286df3\") " Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.296843 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/240474a3-cc8a-4bfd-991d-1e67a6286df3-operator-scripts\") pod \"240474a3-cc8a-4bfd-991d-1e67a6286df3\" (UID: \"240474a3-cc8a-4bfd-991d-1e67a6286df3\") " Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.297712 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/240474a3-cc8a-4bfd-991d-1e67a6286df3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "240474a3-cc8a-4bfd-991d-1e67a6286df3" (UID: "240474a3-cc8a-4bfd-991d-1e67a6286df3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.303664 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/240474a3-cc8a-4bfd-991d-1e67a6286df3-kube-api-access-kc2x7" (OuterVolumeSpecName: "kube-api-access-kc2x7") pod "240474a3-cc8a-4bfd-991d-1e67a6286df3" (UID: "240474a3-cc8a-4bfd-991d-1e67a6286df3"). InnerVolumeSpecName "kube-api-access-kc2x7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.361384 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-qz4zz" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.365389 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-6hk2z" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.369847 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9cd0-account-create-update-2jfbk" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.399454 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/240474a3-cc8a-4bfd-991d-1e67a6286df3-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.399481 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kc2x7\" (UniqueName: \"kubernetes.io/projected/240474a3-cc8a-4bfd-991d-1e67a6286df3-kube-api-access-kc2x7\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.500945 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67d9311d-d647-40f4-b291-c3540b41f78c-operator-scripts\") pod \"67d9311d-d647-40f4-b291-c3540b41f78c\" (UID: \"67d9311d-d647-40f4-b291-c3540b41f78c\") " Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.501045 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7s8g\" (UniqueName: \"kubernetes.io/projected/63a79d2a-f82a-49df-b959-e3c0b61cd34a-kube-api-access-w7s8g\") pod \"63a79d2a-f82a-49df-b959-e3c0b61cd34a\" (UID: \"63a79d2a-f82a-49df-b959-e3c0b61cd34a\") " Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.501091 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63a79d2a-f82a-49df-b959-e3c0b61cd34a-operator-scripts\") pod \"63a79d2a-f82a-49df-b959-e3c0b61cd34a\" (UID: \"63a79d2a-f82a-49df-b959-e3c0b61cd34a\") " Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.501181 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llp8q\" (UniqueName: \"kubernetes.io/projected/b095b953-f02f-4a15-b3e0-e698a15b848f-kube-api-access-llp8q\") pod \"b095b953-f02f-4a15-b3e0-e698a15b848f\" (UID: \"b095b953-f02f-4a15-b3e0-e698a15b848f\") " Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.501209 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25p89\" (UniqueName: \"kubernetes.io/projected/67d9311d-d647-40f4-b291-c3540b41f78c-kube-api-access-25p89\") pod \"67d9311d-d647-40f4-b291-c3540b41f78c\" (UID: \"67d9311d-d647-40f4-b291-c3540b41f78c\") " Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.501280 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b095b953-f02f-4a15-b3e0-e698a15b848f-operator-scripts\") pod \"b095b953-f02f-4a15-b3e0-e698a15b848f\" (UID: \"b095b953-f02f-4a15-b3e0-e698a15b848f\") " Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.501441 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67d9311d-d647-40f4-b291-c3540b41f78c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "67d9311d-d647-40f4-b291-c3540b41f78c" (UID: "67d9311d-d647-40f4-b291-c3540b41f78c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.501730 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63a79d2a-f82a-49df-b959-e3c0b61cd34a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "63a79d2a-f82a-49df-b959-e3c0b61cd34a" (UID: "63a79d2a-f82a-49df-b959-e3c0b61cd34a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.501937 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b095b953-f02f-4a15-b3e0-e698a15b848f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b095b953-f02f-4a15-b3e0-e698a15b848f" (UID: "b095b953-f02f-4a15-b3e0-e698a15b848f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.502139 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b095b953-f02f-4a15-b3e0-e698a15b848f-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.502161 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67d9311d-d647-40f4-b291-c3540b41f78c-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.502170 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63a79d2a-f82a-49df-b959-e3c0b61cd34a-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.504582 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67d9311d-d647-40f4-b291-c3540b41f78c-kube-api-access-25p89" (OuterVolumeSpecName: "kube-api-access-25p89") pod "67d9311d-d647-40f4-b291-c3540b41f78c" (UID: "67d9311d-d647-40f4-b291-c3540b41f78c"). InnerVolumeSpecName "kube-api-access-25p89". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.504870 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63a79d2a-f82a-49df-b959-e3c0b61cd34a-kube-api-access-w7s8g" (OuterVolumeSpecName: "kube-api-access-w7s8g") pod "63a79d2a-f82a-49df-b959-e3c0b61cd34a" (UID: "63a79d2a-f82a-49df-b959-e3c0b61cd34a"). InnerVolumeSpecName "kube-api-access-w7s8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.505894 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b095b953-f02f-4a15-b3e0-e698a15b848f-kube-api-access-llp8q" (OuterVolumeSpecName: "kube-api-access-llp8q") pod "b095b953-f02f-4a15-b3e0-e698a15b848f" (UID: "b095b953-f02f-4a15-b3e0-e698a15b848f"). InnerVolumeSpecName "kube-api-access-llp8q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.575685 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.604723 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llp8q\" (UniqueName: \"kubernetes.io/projected/b095b953-f02f-4a15-b3e0-e698a15b848f-kube-api-access-llp8q\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.604756 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25p89\" (UniqueName: \"kubernetes.io/projected/67d9311d-d647-40f4-b291-c3540b41f78c-kube-api-access-25p89\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.604767 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7s8g\" (UniqueName: \"kubernetes.io/projected/63a79d2a-f82a-49df-b959-e3c0b61cd34a-kube-api-access-w7s8g\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.623172 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6486446b9f-qm4ht"] Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.623422 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" podUID="968b0117-b325-47ff-aba1-87eaf3d326ad" containerName="dnsmasq-dns" containerID="cri-o://4d08402a434335bd9df5b51afa91e3b69cda08af1dd435705fa3294401429bd9" gracePeriod=10 Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.903773 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9cd0-account-create-update-2jfbk" event={"ID":"67d9311d-d647-40f4-b291-c3540b41f78c","Type":"ContainerDied","Data":"9b85b99d8569fd22c58fb040e62a6ab171890a6c9df08be06512c707f9f1f234"} Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.903809 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9cd0-account-create-update-2jfbk" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.903817 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b85b99d8569fd22c58fb040e62a6ab171890a6c9df08be06512c707f9f1f234" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.906070 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-6hk2z" event={"ID":"63a79d2a-f82a-49df-b959-e3c0b61cd34a","Type":"ContainerDied","Data":"9ab3837e9b908816125ae485052ae33ff1011a6d7b753e3d1fb46cb76ed9928e"} Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.906119 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ab3837e9b908816125ae485052ae33ff1011a6d7b753e3d1fb46cb76ed9928e" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.906139 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-6hk2z" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.907808 4622 generic.go:334] "Generic (PLEG): container finished" podID="968b0117-b325-47ff-aba1-87eaf3d326ad" containerID="4d08402a434335bd9df5b51afa91e3b69cda08af1dd435705fa3294401429bd9" exitCode=0 Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.907855 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" event={"ID":"968b0117-b325-47ff-aba1-87eaf3d326ad","Type":"ContainerDied","Data":"4d08402a434335bd9df5b51afa91e3b69cda08af1dd435705fa3294401429bd9"} Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.909038 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6540-account-create-update-dgl4g" event={"ID":"240474a3-cc8a-4bfd-991d-1e67a6286df3","Type":"ContainerDied","Data":"77a548a61ed332475db18f1765006a91cc8a36d04cca9de3a326edbf13b9e411"} Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.909073 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77a548a61ed332475db18f1765006a91cc8a36d04cca9de3a326edbf13b9e411" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.909051 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6540-account-create-update-dgl4g" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.910988 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-qz4zz" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.919110 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-qz4zz" event={"ID":"b095b953-f02f-4a15-b3e0-e698a15b848f","Type":"ContainerDied","Data":"4d57e602c5dd578504e9411c4fc7ebbff185cfb2191b6ffcb5d0d0cdf3ea14b4"} Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.919139 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d57e602c5dd578504e9411c4fc7ebbff185cfb2191b6ffcb5d0d0cdf3ea14b4" Nov 26 11:26:03 crc kubenswrapper[4622]: I1126 11:26:03.998259 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" Nov 26 11:26:04 crc kubenswrapper[4622]: I1126 11:26:04.121770 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8h5dh\" (UniqueName: \"kubernetes.io/projected/968b0117-b325-47ff-aba1-87eaf3d326ad-kube-api-access-8h5dh\") pod \"968b0117-b325-47ff-aba1-87eaf3d326ad\" (UID: \"968b0117-b325-47ff-aba1-87eaf3d326ad\") " Nov 26 11:26:04 crc kubenswrapper[4622]: I1126 11:26:04.121895 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/968b0117-b325-47ff-aba1-87eaf3d326ad-config\") pod \"968b0117-b325-47ff-aba1-87eaf3d326ad\" (UID: \"968b0117-b325-47ff-aba1-87eaf3d326ad\") " Nov 26 11:26:04 crc kubenswrapper[4622]: I1126 11:26:04.121966 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/968b0117-b325-47ff-aba1-87eaf3d326ad-dns-svc\") pod \"968b0117-b325-47ff-aba1-87eaf3d326ad\" (UID: \"968b0117-b325-47ff-aba1-87eaf3d326ad\") " Nov 26 11:26:04 crc kubenswrapper[4622]: I1126 11:26:04.126760 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/968b0117-b325-47ff-aba1-87eaf3d326ad-kube-api-access-8h5dh" (OuterVolumeSpecName: "kube-api-access-8h5dh") pod "968b0117-b325-47ff-aba1-87eaf3d326ad" (UID: "968b0117-b325-47ff-aba1-87eaf3d326ad"). InnerVolumeSpecName "kube-api-access-8h5dh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:04 crc kubenswrapper[4622]: I1126 11:26:04.151348 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/968b0117-b325-47ff-aba1-87eaf3d326ad-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "968b0117-b325-47ff-aba1-87eaf3d326ad" (UID: "968b0117-b325-47ff-aba1-87eaf3d326ad"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:04 crc kubenswrapper[4622]: I1126 11:26:04.152607 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/968b0117-b325-47ff-aba1-87eaf3d326ad-config" (OuterVolumeSpecName: "config") pod "968b0117-b325-47ff-aba1-87eaf3d326ad" (UID: "968b0117-b325-47ff-aba1-87eaf3d326ad"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:04 crc kubenswrapper[4622]: I1126 11:26:04.223955 4622 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/968b0117-b325-47ff-aba1-87eaf3d326ad-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:04 crc kubenswrapper[4622]: I1126 11:26:04.224012 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8h5dh\" (UniqueName: \"kubernetes.io/projected/968b0117-b325-47ff-aba1-87eaf3d326ad-kube-api-access-8h5dh\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:04 crc kubenswrapper[4622]: I1126 11:26:04.224032 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/968b0117-b325-47ff-aba1-87eaf3d326ad-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:04 crc kubenswrapper[4622]: I1126 11:26:04.918105 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" event={"ID":"968b0117-b325-47ff-aba1-87eaf3d326ad","Type":"ContainerDied","Data":"03e4ccfa2106342c55431a376e13d20f003d6cfd9b2f81264d5fea31e9d893ad"} Nov 26 11:26:04 crc kubenswrapper[4622]: I1126 11:26:04.918159 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6486446b9f-qm4ht" Nov 26 11:26:04 crc kubenswrapper[4622]: I1126 11:26:04.918369 4622 scope.go:117] "RemoveContainer" containerID="4d08402a434335bd9df5b51afa91e3b69cda08af1dd435705fa3294401429bd9" Nov 26 11:26:04 crc kubenswrapper[4622]: I1126 11:26:04.932308 4622 scope.go:117] "RemoveContainer" containerID="ae65043521d34f6c8365bd992bd303ea0347019cd842dbbb35649250a7b14519" Nov 26 11:26:04 crc kubenswrapper[4622]: I1126 11:26:04.933973 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6486446b9f-qm4ht"] Nov 26 11:26:04 crc kubenswrapper[4622]: I1126 11:26:04.939485 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6486446b9f-qm4ht"] Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.218415 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zfq2z"] Nov 26 11:26:05 crc kubenswrapper[4622]: E1126 11:26:05.218817 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="240474a3-cc8a-4bfd-991d-1e67a6286df3" containerName="mariadb-account-create-update" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.218838 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="240474a3-cc8a-4bfd-991d-1e67a6286df3" containerName="mariadb-account-create-update" Nov 26 11:26:05 crc kubenswrapper[4622]: E1126 11:26:05.218861 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b095b953-f02f-4a15-b3e0-e698a15b848f" containerName="mariadb-database-create" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.218875 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="b095b953-f02f-4a15-b3e0-e698a15b848f" containerName="mariadb-database-create" Nov 26 11:26:05 crc kubenswrapper[4622]: E1126 11:26:05.218882 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67d9311d-d647-40f4-b291-c3540b41f78c" containerName="mariadb-account-create-update" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.218888 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="67d9311d-d647-40f4-b291-c3540b41f78c" containerName="mariadb-account-create-update" Nov 26 11:26:05 crc kubenswrapper[4622]: E1126 11:26:05.218908 4622 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="968b0117-b325-47ff-aba1-87eaf3d326ad" containerName="init" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.218913 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="968b0117-b325-47ff-aba1-87eaf3d326ad" containerName="init" Nov 26 11:26:05 crc kubenswrapper[4622]: E1126 11:26:05.218929 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="968b0117-b325-47ff-aba1-87eaf3d326ad" containerName="dnsmasq-dns" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.218934 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="968b0117-b325-47ff-aba1-87eaf3d326ad" containerName="dnsmasq-dns" Nov 26 11:26:05 crc kubenswrapper[4622]: E1126 11:26:05.218942 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63a79d2a-f82a-49df-b959-e3c0b61cd34a" containerName="mariadb-database-create" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.218948 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="63a79d2a-f82a-49df-b959-e3c0b61cd34a" containerName="mariadb-database-create" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.219157 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="67d9311d-d647-40f4-b291-c3540b41f78c" containerName="mariadb-account-create-update" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.219174 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="968b0117-b325-47ff-aba1-87eaf3d326ad" containerName="dnsmasq-dns" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.219182 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="240474a3-cc8a-4bfd-991d-1e67a6286df3" containerName="mariadb-account-create-update" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.219200 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="63a79d2a-f82a-49df-b959-e3c0b61cd34a" containerName="mariadb-database-create" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.219211 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="b095b953-f02f-4a15-b3e0-e698a15b848f" containerName="mariadb-database-create" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.220427 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.227113 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zfq2z"] Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.343441 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02f6d949-d899-4367-9648-006b5fdb26e4-utilities\") pod \"community-operators-zfq2z\" (UID: \"02f6d949-d899-4367-9648-006b5fdb26e4\") " pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.343552 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02f6d949-d899-4367-9648-006b5fdb26e4-catalog-content\") pod \"community-operators-zfq2z\" (UID: \"02f6d949-d899-4367-9648-006b5fdb26e4\") " pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.343723 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5j4fx\" (UniqueName: \"kubernetes.io/projected/02f6d949-d899-4367-9648-006b5fdb26e4-kube-api-access-5j4fx\") pod \"community-operators-zfq2z\" (UID: \"02f6d949-d899-4367-9648-006b5fdb26e4\") " pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.446107 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5j4fx\" (UniqueName: \"kubernetes.io/projected/02f6d949-d899-4367-9648-006b5fdb26e4-kube-api-access-5j4fx\") pod \"community-operators-zfq2z\" (UID: \"02f6d949-d899-4367-9648-006b5fdb26e4\") " pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.446524 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02f6d949-d899-4367-9648-006b5fdb26e4-utilities\") pod \"community-operators-zfq2z\" (UID: \"02f6d949-d899-4367-9648-006b5fdb26e4\") " pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.446646 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02f6d949-d899-4367-9648-006b5fdb26e4-catalog-content\") pod \"community-operators-zfq2z\" (UID: \"02f6d949-d899-4367-9648-006b5fdb26e4\") " pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.447071 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02f6d949-d899-4367-9648-006b5fdb26e4-utilities\") pod \"community-operators-zfq2z\" (UID: \"02f6d949-d899-4367-9648-006b5fdb26e4\") " pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.447190 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02f6d949-d899-4367-9648-006b5fdb26e4-catalog-content\") pod \"community-operators-zfq2z\" (UID: \"02f6d949-d899-4367-9648-006b5fdb26e4\") " pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.464630 4622 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5j4fx\" (UniqueName: \"kubernetes.io/projected/02f6d949-d899-4367-9648-006b5fdb26e4-kube-api-access-5j4fx\") pod \"community-operators-zfq2z\" (UID: \"02f6d949-d899-4367-9648-006b5fdb26e4\") " pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.533207 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.877196 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-zxx5f"] Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.878173 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zxx5f" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.886304 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-zxx5f"] Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.932560 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zfq2z"] Nov 26 11:26:05 crc kubenswrapper[4622]: W1126 11:26:05.939454 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02f6d949_d899_4367_9648_006b5fdb26e4.slice/crio-0abe3f19dc74a04cd46c46840caa9b6bf5e49410037ac503b881583d858b7f1d WatchSource:0}: Error finding container 0abe3f19dc74a04cd46c46840caa9b6bf5e49410037ac503b881583d858b7f1d: Status 404 returned error can't find the container with id 0abe3f19dc74a04cd46c46840caa9b6bf5e49410037ac503b881583d858b7f1d Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.956847 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm49b\" (UniqueName: \"kubernetes.io/projected/7e816521-5607-43d2-89b6-d8643c61eb01-kube-api-access-lm49b\") pod \"glance-db-create-zxx5f\" (UID: \"7e816521-5607-43d2-89b6-d8643c61eb01\") " pod="openstack/glance-db-create-zxx5f" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.957096 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e816521-5607-43d2-89b6-d8643c61eb01-operator-scripts\") pod \"glance-db-create-zxx5f\" (UID: \"7e816521-5607-43d2-89b6-d8643c61eb01\") " pod="openstack/glance-db-create-zxx5f" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.976269 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-89f8-account-create-update-5chl8"] Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.977559 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-89f8-account-create-update-5chl8" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.982466 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Nov 26 11:26:05 crc kubenswrapper[4622]: I1126 11:26:05.984791 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-89f8-account-create-update-5chl8"] Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.059071 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm49b\" (UniqueName: \"kubernetes.io/projected/7e816521-5607-43d2-89b6-d8643c61eb01-kube-api-access-lm49b\") pod \"glance-db-create-zxx5f\" (UID: \"7e816521-5607-43d2-89b6-d8643c61eb01\") " pod="openstack/glance-db-create-zxx5f" Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.059132 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7k4cp\" (UniqueName: \"kubernetes.io/projected/8746b761-9144-452e-aa5c-4c4ca60b03ad-kube-api-access-7k4cp\") pod \"glance-89f8-account-create-update-5chl8\" (UID: \"8746b761-9144-452e-aa5c-4c4ca60b03ad\") " pod="openstack/glance-89f8-account-create-update-5chl8" Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.059160 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8746b761-9144-452e-aa5c-4c4ca60b03ad-operator-scripts\") pod \"glance-89f8-account-create-update-5chl8\" (UID: \"8746b761-9144-452e-aa5c-4c4ca60b03ad\") " pod="openstack/glance-89f8-account-create-update-5chl8" Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.059424 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e816521-5607-43d2-89b6-d8643c61eb01-operator-scripts\") pod \"glance-db-create-zxx5f\" (UID: \"7e816521-5607-43d2-89b6-d8643c61eb01\") " pod="openstack/glance-db-create-zxx5f" Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.060815 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e816521-5607-43d2-89b6-d8643c61eb01-operator-scripts\") pod \"glance-db-create-zxx5f\" (UID: \"7e816521-5607-43d2-89b6-d8643c61eb01\") " pod="openstack/glance-db-create-zxx5f" Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.078357 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lm49b\" (UniqueName: \"kubernetes.io/projected/7e816521-5607-43d2-89b6-d8643c61eb01-kube-api-access-lm49b\") pod \"glance-db-create-zxx5f\" (UID: \"7e816521-5607-43d2-89b6-d8643c61eb01\") " pod="openstack/glance-db-create-zxx5f" Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.162209 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7k4cp\" (UniqueName: \"kubernetes.io/projected/8746b761-9144-452e-aa5c-4c4ca60b03ad-kube-api-access-7k4cp\") pod \"glance-89f8-account-create-update-5chl8\" (UID: \"8746b761-9144-452e-aa5c-4c4ca60b03ad\") " pod="openstack/glance-89f8-account-create-update-5chl8" Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.162283 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8746b761-9144-452e-aa5c-4c4ca60b03ad-operator-scripts\") pod 
\"glance-89f8-account-create-update-5chl8\" (UID: \"8746b761-9144-452e-aa5c-4c4ca60b03ad\") " pod="openstack/glance-89f8-account-create-update-5chl8" Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.163264 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8746b761-9144-452e-aa5c-4c4ca60b03ad-operator-scripts\") pod \"glance-89f8-account-create-update-5chl8\" (UID: \"8746b761-9144-452e-aa5c-4c4ca60b03ad\") " pod="openstack/glance-89f8-account-create-update-5chl8" Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.177647 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7k4cp\" (UniqueName: \"kubernetes.io/projected/8746b761-9144-452e-aa5c-4c4ca60b03ad-kube-api-access-7k4cp\") pod \"glance-89f8-account-create-update-5chl8\" (UID: \"8746b761-9144-452e-aa5c-4c4ca60b03ad\") " pod="openstack/glance-89f8-account-create-update-5chl8" Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.192689 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zxx5f" Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.339482 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-89f8-account-create-update-5chl8" Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.582735 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-zxx5f"] Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.735623 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="968b0117-b325-47ff-aba1-87eaf3d326ad" path="/var/lib/kubelet/pods/968b0117-b325-47ff-aba1-87eaf3d326ad/volumes" Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.738609 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-89f8-account-create-update-5chl8"] Nov 26 11:26:06 crc kubenswrapper[4622]: W1126 11:26:06.741998 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8746b761_9144_452e_aa5c_4c4ca60b03ad.slice/crio-df36e8c3d8d96e41ce1e1035141ca53f8d8ab6c1d4d37fa7f304c1d91bf6d231 WatchSource:0}: Error finding container df36e8c3d8d96e41ce1e1035141ca53f8d8ab6c1d4d37fa7f304c1d91bf6d231: Status 404 returned error can't find the container with id df36e8c3d8d96e41ce1e1035141ca53f8d8ab6c1d4d37fa7f304c1d91bf6d231 Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.935367 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-89f8-account-create-update-5chl8" event={"ID":"8746b761-9144-452e-aa5c-4c4ca60b03ad","Type":"ContainerStarted","Data":"da0c22113970c5ace2726fb60821d5564da901faf8ce88a8881313486d7dc926"} Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.935876 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-89f8-account-create-update-5chl8" event={"ID":"8746b761-9144-452e-aa5c-4c4ca60b03ad","Type":"ContainerStarted","Data":"df36e8c3d8d96e41ce1e1035141ca53f8d8ab6c1d4d37fa7f304c1d91bf6d231"} Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.937547 4622 generic.go:334] "Generic (PLEG): container finished" podID="7e816521-5607-43d2-89b6-d8643c61eb01" containerID="d4745f4ad95fab6ede096d0eaa7dcbe6208f708deb300d93b1cfc23866c1ecf1" exitCode=0 Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.937601 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zxx5f" 
event={"ID":"7e816521-5607-43d2-89b6-d8643c61eb01","Type":"ContainerDied","Data":"d4745f4ad95fab6ede096d0eaa7dcbe6208f708deb300d93b1cfc23866c1ecf1"} Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.937632 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zxx5f" event={"ID":"7e816521-5607-43d2-89b6-d8643c61eb01","Type":"ContainerStarted","Data":"320855838fe4e370bc87ea441303264263c518ef8f3c6a0b3edaaeb9700b5ca8"} Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.939458 4622 generic.go:334] "Generic (PLEG): container finished" podID="02f6d949-d899-4367-9648-006b5fdb26e4" containerID="6a9788b70c533f22ff68cc7de8b27db1f9358b2b93f9f9f49b6381dc67dc804b" exitCode=0 Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.939496 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zfq2z" event={"ID":"02f6d949-d899-4367-9648-006b5fdb26e4","Type":"ContainerDied","Data":"6a9788b70c533f22ff68cc7de8b27db1f9358b2b93f9f9f49b6381dc67dc804b"} Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.939552 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zfq2z" event={"ID":"02f6d949-d899-4367-9648-006b5fdb26e4","Type":"ContainerStarted","Data":"0abe3f19dc74a04cd46c46840caa9b6bf5e49410037ac503b881583d858b7f1d"} Nov 26 11:26:06 crc kubenswrapper[4622]: I1126 11:26:06.960569 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-89f8-account-create-update-5chl8" podStartSLOduration=1.960545429 podStartE2EDuration="1.960545429s" podCreationTimestamp="2025-11-26 11:26:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:26:06.957152547 +0000 UTC m=+926.548364069" watchObservedRunningTime="2025-11-26 11:26:06.960545429 +0000 UTC m=+926.551756951" Nov 26 11:26:07 crc kubenswrapper[4622]: I1126 11:26:07.946959 4622 generic.go:334] "Generic (PLEG): container finished" podID="8746b761-9144-452e-aa5c-4c4ca60b03ad" containerID="da0c22113970c5ace2726fb60821d5564da901faf8ce88a8881313486d7dc926" exitCode=0 Nov 26 11:26:07 crc kubenswrapper[4622]: I1126 11:26:07.947003 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-89f8-account-create-update-5chl8" event={"ID":"8746b761-9144-452e-aa5c-4c4ca60b03ad","Type":"ContainerDied","Data":"da0c22113970c5ace2726fb60821d5564da901faf8ce88a8881313486d7dc926"} Nov 26 11:26:07 crc kubenswrapper[4622]: I1126 11:26:07.949050 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zfq2z" event={"ID":"02f6d949-d899-4367-9648-006b5fdb26e4","Type":"ContainerStarted","Data":"09f037d1efcc2af40b89921183d8b28f542b87830b3b558ff41f85815620e68b"} Nov 26 11:26:08 crc kubenswrapper[4622]: I1126 11:26:08.242623 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-zxx5f" Nov 26 11:26:08 crc kubenswrapper[4622]: I1126 11:26:08.405153 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e816521-5607-43d2-89b6-d8643c61eb01-operator-scripts\") pod \"7e816521-5607-43d2-89b6-d8643c61eb01\" (UID: \"7e816521-5607-43d2-89b6-d8643c61eb01\") " Nov 26 11:26:08 crc kubenswrapper[4622]: I1126 11:26:08.405449 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lm49b\" (UniqueName: \"kubernetes.io/projected/7e816521-5607-43d2-89b6-d8643c61eb01-kube-api-access-lm49b\") pod \"7e816521-5607-43d2-89b6-d8643c61eb01\" (UID: \"7e816521-5607-43d2-89b6-d8643c61eb01\") " Nov 26 11:26:08 crc kubenswrapper[4622]: I1126 11:26:08.406177 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e816521-5607-43d2-89b6-d8643c61eb01-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7e816521-5607-43d2-89b6-d8643c61eb01" (UID: "7e816521-5607-43d2-89b6-d8643c61eb01"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:08 crc kubenswrapper[4622]: I1126 11:26:08.413139 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e816521-5607-43d2-89b6-d8643c61eb01-kube-api-access-lm49b" (OuterVolumeSpecName: "kube-api-access-lm49b") pod "7e816521-5607-43d2-89b6-d8643c61eb01" (UID: "7e816521-5607-43d2-89b6-d8643c61eb01"). InnerVolumeSpecName "kube-api-access-lm49b". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:08 crc kubenswrapper[4622]: I1126 11:26:08.507734 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e816521-5607-43d2-89b6-d8643c61eb01-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:08 crc kubenswrapper[4622]: I1126 11:26:08.507766 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lm49b\" (UniqueName: \"kubernetes.io/projected/7e816521-5607-43d2-89b6-d8643c61eb01-kube-api-access-lm49b\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:08 crc kubenswrapper[4622]: I1126 11:26:08.740840 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Nov 26 11:26:08 crc kubenswrapper[4622]: I1126 11:26:08.961141 4622 generic.go:334] "Generic (PLEG): container finished" podID="02f6d949-d899-4367-9648-006b5fdb26e4" containerID="09f037d1efcc2af40b89921183d8b28f542b87830b3b558ff41f85815620e68b" exitCode=0 Nov 26 11:26:08 crc kubenswrapper[4622]: I1126 11:26:08.961282 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zfq2z" event={"ID":"02f6d949-d899-4367-9648-006b5fdb26e4","Type":"ContainerDied","Data":"09f037d1efcc2af40b89921183d8b28f542b87830b3b558ff41f85815620e68b"} Nov 26 11:26:08 crc kubenswrapper[4622]: I1126 11:26:08.966435 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-zxx5f" Nov 26 11:26:08 crc kubenswrapper[4622]: I1126 11:26:08.967550 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zxx5f" event={"ID":"7e816521-5607-43d2-89b6-d8643c61eb01","Type":"ContainerDied","Data":"320855838fe4e370bc87ea441303264263c518ef8f3c6a0b3edaaeb9700b5ca8"} Nov 26 11:26:08 crc kubenswrapper[4622]: I1126 11:26:08.968273 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="320855838fe4e370bc87ea441303264263c518ef8f3c6a0b3edaaeb9700b5ca8" Nov 26 11:26:09 crc kubenswrapper[4622]: I1126 11:26:09.254162 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-89f8-account-create-update-5chl8" Nov 26 11:26:09 crc kubenswrapper[4622]: I1126 11:26:09.422873 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8746b761-9144-452e-aa5c-4c4ca60b03ad-operator-scripts\") pod \"8746b761-9144-452e-aa5c-4c4ca60b03ad\" (UID: \"8746b761-9144-452e-aa5c-4c4ca60b03ad\") " Nov 26 11:26:09 crc kubenswrapper[4622]: I1126 11:26:09.423341 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7k4cp\" (UniqueName: \"kubernetes.io/projected/8746b761-9144-452e-aa5c-4c4ca60b03ad-kube-api-access-7k4cp\") pod \"8746b761-9144-452e-aa5c-4c4ca60b03ad\" (UID: \"8746b761-9144-452e-aa5c-4c4ca60b03ad\") " Nov 26 11:26:09 crc kubenswrapper[4622]: I1126 11:26:09.423698 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8746b761-9144-452e-aa5c-4c4ca60b03ad-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8746b761-9144-452e-aa5c-4c4ca60b03ad" (UID: "8746b761-9144-452e-aa5c-4c4ca60b03ad"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:09 crc kubenswrapper[4622]: I1126 11:26:09.424257 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8746b761-9144-452e-aa5c-4c4ca60b03ad-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:09 crc kubenswrapper[4622]: I1126 11:26:09.429144 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8746b761-9144-452e-aa5c-4c4ca60b03ad-kube-api-access-7k4cp" (OuterVolumeSpecName: "kube-api-access-7k4cp") pod "8746b761-9144-452e-aa5c-4c4ca60b03ad" (UID: "8746b761-9144-452e-aa5c-4c4ca60b03ad"). InnerVolumeSpecName "kube-api-access-7k4cp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:09 crc kubenswrapper[4622]: I1126 11:26:09.525859 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7k4cp\" (UniqueName: \"kubernetes.io/projected/8746b761-9144-452e-aa5c-4c4ca60b03ad-kube-api-access-7k4cp\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:09 crc kubenswrapper[4622]: I1126 11:26:09.974903 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zfq2z" event={"ID":"02f6d949-d899-4367-9648-006b5fdb26e4","Type":"ContainerStarted","Data":"357e8330b3343687d5c623f7c14074b87ceb3aff24ca54686d204844f68a0324"} Nov 26 11:26:09 crc kubenswrapper[4622]: I1126 11:26:09.976074 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-89f8-account-create-update-5chl8" event={"ID":"8746b761-9144-452e-aa5c-4c4ca60b03ad","Type":"ContainerDied","Data":"df36e8c3d8d96e41ce1e1035141ca53f8d8ab6c1d4d37fa7f304c1d91bf6d231"} Nov 26 11:26:09 crc kubenswrapper[4622]: I1126 11:26:09.976144 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df36e8c3d8d96e41ce1e1035141ca53f8d8ab6c1d4d37fa7f304c1d91bf6d231" Nov 26 11:26:09 crc kubenswrapper[4622]: I1126 11:26:09.976178 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-89f8-account-create-update-5chl8" Nov 26 11:26:09 crc kubenswrapper[4622]: I1126 11:26:09.995993 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zfq2z" podStartSLOduration=2.445065704 podStartE2EDuration="4.995976776s" podCreationTimestamp="2025-11-26 11:26:05 +0000 UTC" firstStartedPulling="2025-11-26 11:26:06.941061305 +0000 UTC m=+926.532272828" lastFinishedPulling="2025-11-26 11:26:09.491972378 +0000 UTC m=+929.083183900" observedRunningTime="2025-11-26 11:26:09.993533125 +0000 UTC m=+929.584744668" watchObservedRunningTime="2025-11-26 11:26:09.995976776 +0000 UTC m=+929.587188298" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.197723 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-hjhvx"] Nov 26 11:26:11 crc kubenswrapper[4622]: E1126 11:26:11.198113 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8746b761-9144-452e-aa5c-4c4ca60b03ad" containerName="mariadb-account-create-update" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.198131 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="8746b761-9144-452e-aa5c-4c4ca60b03ad" containerName="mariadb-account-create-update" Nov 26 11:26:11 crc kubenswrapper[4622]: E1126 11:26:11.198154 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e816521-5607-43d2-89b6-d8643c61eb01" containerName="mariadb-database-create" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.198160 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e816521-5607-43d2-89b6-d8643c61eb01" containerName="mariadb-database-create" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.198338 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e816521-5607-43d2-89b6-d8643c61eb01" containerName="mariadb-database-create" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.198367 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="8746b761-9144-452e-aa5c-4c4ca60b03ad" containerName="mariadb-account-create-update" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.198987 4622 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.200664 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.200670 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-62dwh" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.206032 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-hjhvx"] Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.359985 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-db-sync-config-data\") pod \"glance-db-sync-hjhvx\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.360290 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-combined-ca-bundle\") pod \"glance-db-sync-hjhvx\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.360345 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-config-data\") pod \"glance-db-sync-hjhvx\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.360396 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6v57\" (UniqueName: \"kubernetes.io/projected/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-kube-api-access-z6v57\") pod \"glance-db-sync-hjhvx\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.462534 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6v57\" (UniqueName: \"kubernetes.io/projected/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-kube-api-access-z6v57\") pod \"glance-db-sync-hjhvx\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.462873 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-db-sync-config-data\") pod \"glance-db-sync-hjhvx\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.462955 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-combined-ca-bundle\") pod \"glance-db-sync-hjhvx\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.463800 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-config-data\") pod 
\"glance-db-sync-hjhvx\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.468665 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-db-sync-config-data\") pod \"glance-db-sync-hjhvx\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.468786 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-combined-ca-bundle\") pod \"glance-db-sync-hjhvx\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.469229 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-config-data\") pod \"glance-db-sync-hjhvx\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.476763 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6v57\" (UniqueName: \"kubernetes.io/projected/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-kube-api-access-z6v57\") pod \"glance-db-sync-hjhvx\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.517342 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.947583 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-hjhvx"] Nov 26 11:26:11 crc kubenswrapper[4622]: W1126 11:26:11.951370 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bbaef1d_2eba_4b11_a853_97b7af40b7e7.slice/crio-64c27f688d05f600f90505dc163d3913e50754cc8f4b8de3eb172300d3f67930 WatchSource:0}: Error finding container 64c27f688d05f600f90505dc163d3913e50754cc8f4b8de3eb172300d3f67930: Status 404 returned error can't find the container with id 64c27f688d05f600f90505dc163d3913e50754cc8f4b8de3eb172300d3f67930 Nov 26 11:26:11 crc kubenswrapper[4622]: I1126 11:26:11.989231 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hjhvx" event={"ID":"8bbaef1d-2eba-4b11-a853-97b7af40b7e7","Type":"ContainerStarted","Data":"64c27f688d05f600f90505dc163d3913e50754cc8f4b8de3eb172300d3f67930"} Nov 26 11:26:12 crc kubenswrapper[4622]: I1126 11:26:12.786202 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:26:12 crc kubenswrapper[4622]: I1126 11:26:12.822480 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pz5p4"] Nov 26 11:26:13 crc kubenswrapper[4622]: I1126 11:26:13.012810 4622 generic.go:334] "Generic (PLEG): container finished" podID="35d9b078-6e67-49d3-a82d-c0b0bc289904" containerID="041b7b43d0fdde5d09001c58a1e433023ac172b898a5c6b1424d9cc25714de94" exitCode=0 Nov 26 11:26:13 crc kubenswrapper[4622]: I1126 11:26:13.012899 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"35d9b078-6e67-49d3-a82d-c0b0bc289904","Type":"ContainerDied","Data":"041b7b43d0fdde5d09001c58a1e433023ac172b898a5c6b1424d9cc25714de94"} Nov 26 11:26:13 crc kubenswrapper[4622]: I1126 11:26:13.014339 4622 generic.go:334] "Generic (PLEG): container finished" podID="38fbda9e-5203-4941-829d-1309dcf835e9" containerID="5deadae99260bef731001a5f824300cc52b829eee6bb202d2644be4131ea1279" exitCode=0 Nov 26 11:26:13 crc kubenswrapper[4622]: I1126 11:26:13.014413 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"38fbda9e-5203-4941-829d-1309dcf835e9","Type":"ContainerDied","Data":"5deadae99260bef731001a5f824300cc52b829eee6bb202d2644be4131ea1279"} Nov 26 11:26:13 crc kubenswrapper[4622]: I1126 11:26:13.014582 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pz5p4" podUID="f17aff26-2624-4a58-aed0-4cd5d316e818" containerName="registry-server" containerID="cri-o://1ddfdf391c22a97742a433ce5d86f4ddfe45d9c29fa2add31aff8b6a423ca61a" gracePeriod=2 Nov 26 11:26:13 crc kubenswrapper[4622]: I1126 11:26:13.406351 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:26:13 crc kubenswrapper[4622]: I1126 11:26:13.497613 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f17aff26-2624-4a58-aed0-4cd5d316e818-utilities\") pod \"f17aff26-2624-4a58-aed0-4cd5d316e818\" (UID: \"f17aff26-2624-4a58-aed0-4cd5d316e818\") " Nov 26 11:26:13 crc kubenswrapper[4622]: I1126 11:26:13.497775 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f17aff26-2624-4a58-aed0-4cd5d316e818-catalog-content\") pod \"f17aff26-2624-4a58-aed0-4cd5d316e818\" (UID: \"f17aff26-2624-4a58-aed0-4cd5d316e818\") " Nov 26 11:26:13 crc kubenswrapper[4622]: I1126 11:26:13.497826 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ctm4\" (UniqueName: \"kubernetes.io/projected/f17aff26-2624-4a58-aed0-4cd5d316e818-kube-api-access-5ctm4\") pod \"f17aff26-2624-4a58-aed0-4cd5d316e818\" (UID: \"f17aff26-2624-4a58-aed0-4cd5d316e818\") " Nov 26 11:26:13 crc kubenswrapper[4622]: I1126 11:26:13.498348 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f17aff26-2624-4a58-aed0-4cd5d316e818-utilities" (OuterVolumeSpecName: "utilities") pod "f17aff26-2624-4a58-aed0-4cd5d316e818" (UID: "f17aff26-2624-4a58-aed0-4cd5d316e818"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:26:13 crc kubenswrapper[4622]: I1126 11:26:13.498567 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f17aff26-2624-4a58-aed0-4cd5d316e818-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:13 crc kubenswrapper[4622]: I1126 11:26:13.503453 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f17aff26-2624-4a58-aed0-4cd5d316e818-kube-api-access-5ctm4" (OuterVolumeSpecName: "kube-api-access-5ctm4") pod "f17aff26-2624-4a58-aed0-4cd5d316e818" (UID: "f17aff26-2624-4a58-aed0-4cd5d316e818"). InnerVolumeSpecName "kube-api-access-5ctm4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:13 crc kubenswrapper[4622]: I1126 11:26:13.510472 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f17aff26-2624-4a58-aed0-4cd5d316e818-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f17aff26-2624-4a58-aed0-4cd5d316e818" (UID: "f17aff26-2624-4a58-aed0-4cd5d316e818"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:26:13 crc kubenswrapper[4622]: I1126 11:26:13.600819 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f17aff26-2624-4a58-aed0-4cd5d316e818-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:13 crc kubenswrapper[4622]: I1126 11:26:13.600865 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ctm4\" (UniqueName: \"kubernetes.io/projected/f17aff26-2624-4a58-aed0-4cd5d316e818-kube-api-access-5ctm4\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.027564 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"38fbda9e-5203-4941-829d-1309dcf835e9","Type":"ContainerStarted","Data":"a8b7ed8165d741ca74012d4ee2b84f6c0cfb640926e04d1fedef8bbf4bb5145f"} Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.028274 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.031775 4622 generic.go:334] "Generic (PLEG): container finished" podID="f17aff26-2624-4a58-aed0-4cd5d316e818" containerID="1ddfdf391c22a97742a433ce5d86f4ddfe45d9c29fa2add31aff8b6a423ca61a" exitCode=0 Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.031914 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pz5p4" event={"ID":"f17aff26-2624-4a58-aed0-4cd5d316e818","Type":"ContainerDied","Data":"1ddfdf391c22a97742a433ce5d86f4ddfe45d9c29fa2add31aff8b6a423ca61a"} Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.032081 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pz5p4" event={"ID":"f17aff26-2624-4a58-aed0-4cd5d316e818","Type":"ContainerDied","Data":"4639ff5a97bfa3658978d3a9c2376b5e88393c31e1c121eb88c9b47edfea38d8"} Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.032115 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pz5p4" Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.032115 4622 scope.go:117] "RemoveContainer" containerID="1ddfdf391c22a97742a433ce5d86f4ddfe45d9c29fa2add31aff8b6a423ca61a" Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.034171 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"35d9b078-6e67-49d3-a82d-c0b0bc289904","Type":"ContainerStarted","Data":"536c6d3ee68590a3ee610d404c45fa1623d311b72c6455e3c5300793331111de"} Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.034328 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.053936 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.75860865 podStartE2EDuration="49.053892394s" podCreationTimestamp="2025-11-26 11:25:25 +0000 UTC" firstStartedPulling="2025-11-26 11:25:28.692831262 +0000 UTC m=+888.284042783" lastFinishedPulling="2025-11-26 11:25:39.988115005 +0000 UTC m=+899.579326527" observedRunningTime="2025-11-26 11:26:14.045111245 +0000 UTC m=+933.636322767" watchObservedRunningTime="2025-11-26 11:26:14.053892394 +0000 UTC m=+933.645103937" Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.073494 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=40.092146534 podStartE2EDuration="48.073467368s" podCreationTimestamp="2025-11-26 11:25:26 +0000 UTC" firstStartedPulling="2025-11-26 11:25:31.955617057 +0000 UTC m=+891.546828578" lastFinishedPulling="2025-11-26 11:25:39.93693789 +0000 UTC m=+899.528149412" observedRunningTime="2025-11-26 11:26:14.065575357 +0000 UTC m=+933.656786900" watchObservedRunningTime="2025-11-26 11:26:14.073467368 +0000 UTC m=+933.664678891" Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.076028 4622 scope.go:117] "RemoveContainer" containerID="e93c74c3bd109a5b4a5e671528e92ebdafc0e032146d3eeca363de237457f3b1" Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.085665 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pz5p4"] Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.090143 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pz5p4"] Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.108667 4622 scope.go:117] "RemoveContainer" containerID="9f573418f31f6581a1b8be621caf2f623991ffd9dfdc88c5021946dd4163ad7f" Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.125707 4622 scope.go:117] "RemoveContainer" containerID="1ddfdf391c22a97742a433ce5d86f4ddfe45d9c29fa2add31aff8b6a423ca61a" Nov 26 11:26:14 crc kubenswrapper[4622]: E1126 11:26:14.126072 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ddfdf391c22a97742a433ce5d86f4ddfe45d9c29fa2add31aff8b6a423ca61a\": container with ID starting with 1ddfdf391c22a97742a433ce5d86f4ddfe45d9c29fa2add31aff8b6a423ca61a not found: ID does not exist" containerID="1ddfdf391c22a97742a433ce5d86f4ddfe45d9c29fa2add31aff8b6a423ca61a" Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.126122 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ddfdf391c22a97742a433ce5d86f4ddfe45d9c29fa2add31aff8b6a423ca61a"} 
err="failed to get container status \"1ddfdf391c22a97742a433ce5d86f4ddfe45d9c29fa2add31aff8b6a423ca61a\": rpc error: code = NotFound desc = could not find container \"1ddfdf391c22a97742a433ce5d86f4ddfe45d9c29fa2add31aff8b6a423ca61a\": container with ID starting with 1ddfdf391c22a97742a433ce5d86f4ddfe45d9c29fa2add31aff8b6a423ca61a not found: ID does not exist" Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.126152 4622 scope.go:117] "RemoveContainer" containerID="e93c74c3bd109a5b4a5e671528e92ebdafc0e032146d3eeca363de237457f3b1" Nov 26 11:26:14 crc kubenswrapper[4622]: E1126 11:26:14.126360 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e93c74c3bd109a5b4a5e671528e92ebdafc0e032146d3eeca363de237457f3b1\": container with ID starting with e93c74c3bd109a5b4a5e671528e92ebdafc0e032146d3eeca363de237457f3b1 not found: ID does not exist" containerID="e93c74c3bd109a5b4a5e671528e92ebdafc0e032146d3eeca363de237457f3b1" Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.126431 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e93c74c3bd109a5b4a5e671528e92ebdafc0e032146d3eeca363de237457f3b1"} err="failed to get container status \"e93c74c3bd109a5b4a5e671528e92ebdafc0e032146d3eeca363de237457f3b1\": rpc error: code = NotFound desc = could not find container \"e93c74c3bd109a5b4a5e671528e92ebdafc0e032146d3eeca363de237457f3b1\": container with ID starting with e93c74c3bd109a5b4a5e671528e92ebdafc0e032146d3eeca363de237457f3b1 not found: ID does not exist" Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.126493 4622 scope.go:117] "RemoveContainer" containerID="9f573418f31f6581a1b8be621caf2f623991ffd9dfdc88c5021946dd4163ad7f" Nov 26 11:26:14 crc kubenswrapper[4622]: E1126 11:26:14.127003 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f573418f31f6581a1b8be621caf2f623991ffd9dfdc88c5021946dd4163ad7f\": container with ID starting with 9f573418f31f6581a1b8be621caf2f623991ffd9dfdc88c5021946dd4163ad7f not found: ID does not exist" containerID="9f573418f31f6581a1b8be621caf2f623991ffd9dfdc88c5021946dd4163ad7f" Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.127039 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f573418f31f6581a1b8be621caf2f623991ffd9dfdc88c5021946dd4163ad7f"} err="failed to get container status \"9f573418f31f6581a1b8be621caf2f623991ffd9dfdc88c5021946dd4163ad7f\": rpc error: code = NotFound desc = could not find container \"9f573418f31f6581a1b8be621caf2f623991ffd9dfdc88c5021946dd4163ad7f\": container with ID starting with 9f573418f31f6581a1b8be621caf2f623991ffd9dfdc88c5021946dd4163ad7f not found: ID does not exist" Nov 26 11:26:14 crc kubenswrapper[4622]: I1126 11:26:14.715560 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f17aff26-2624-4a58-aed0-4cd5d316e818" path="/var/lib/kubelet/pods/f17aff26-2624-4a58-aed0-4cd5d316e818/volumes" Nov 26 11:26:15 crc kubenswrapper[4622]: I1126 11:26:15.198533 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:26:15 crc kubenswrapper[4622]: I1126 11:26:15.198595 4622 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:26:15 crc kubenswrapper[4622]: I1126 11:26:15.198641 4622 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:26:15 crc kubenswrapper[4622]: I1126 11:26:15.199324 4622 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d86abbe8387fbedaee53ded7b61aeb7def7973bb53da7bd06534996b89fc85df"} pod="openshift-machine-config-operator/machine-config-daemon-k565w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 26 11:26:15 crc kubenswrapper[4622]: I1126 11:26:15.199389 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" containerID="cri-o://d86abbe8387fbedaee53ded7b61aeb7def7973bb53da7bd06534996b89fc85df" gracePeriod=600 Nov 26 11:26:15 crc kubenswrapper[4622]: I1126 11:26:15.534228 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:15 crc kubenswrapper[4622]: I1126 11:26:15.534529 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:15 crc kubenswrapper[4622]: I1126 11:26:15.567940 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:16 crc kubenswrapper[4622]: I1126 11:26:16.068989 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerID="d86abbe8387fbedaee53ded7b61aeb7def7973bb53da7bd06534996b89fc85df" exitCode=0 Nov 26 11:26:16 crc kubenswrapper[4622]: I1126 11:26:16.069053 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerDied","Data":"d86abbe8387fbedaee53ded7b61aeb7def7973bb53da7bd06534996b89fc85df"} Nov 26 11:26:16 crc kubenswrapper[4622]: I1126 11:26:16.069334 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"c9f0579c71b4854739675c5824d6deebe369961efeefefef06d1fd31216bd3f4"} Nov 26 11:26:16 crc kubenswrapper[4622]: I1126 11:26:16.069355 4622 scope.go:117] "RemoveContainer" containerID="6dbfb41c933dbd29df57c403106270419f480c91a5742a0e6130afbb763abb32" Nov 26 11:26:16 crc kubenswrapper[4622]: I1126 11:26:16.120481 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:16 crc kubenswrapper[4622]: I1126 11:26:16.551117 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-8n2wh" podUID="1fba939c-4058-428f-a359-ea4e031e9fb3" containerName="ovn-controller" probeResult="failure" output=< Nov 26 11:26:16 crc kubenswrapper[4622]: ERROR - ovn-controller connection status is 'not 
connected', expecting 'connected' status Nov 26 11:26:16 crc kubenswrapper[4622]: > Nov 26 11:26:16 crc kubenswrapper[4622]: I1126 11:26:16.815409 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zfq2z"] Nov 26 11:26:18 crc kubenswrapper[4622]: I1126 11:26:18.090890 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zfq2z" podUID="02f6d949-d899-4367-9648-006b5fdb26e4" containerName="registry-server" containerID="cri-o://357e8330b3343687d5c623f7c14074b87ceb3aff24ca54686d204844f68a0324" gracePeriod=2 Nov 26 11:26:19 crc kubenswrapper[4622]: I1126 11:26:19.107822 4622 generic.go:334] "Generic (PLEG): container finished" podID="02f6d949-d899-4367-9648-006b5fdb26e4" containerID="357e8330b3343687d5c623f7c14074b87ceb3aff24ca54686d204844f68a0324" exitCode=0 Nov 26 11:26:19 crc kubenswrapper[4622]: I1126 11:26:19.107936 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zfq2z" event={"ID":"02f6d949-d899-4367-9648-006b5fdb26e4","Type":"ContainerDied","Data":"357e8330b3343687d5c623f7c14074b87ceb3aff24ca54686d204844f68a0324"} Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.227370 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-r44l7"] Nov 26 11:26:20 crc kubenswrapper[4622]: E1126 11:26:20.227975 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f17aff26-2624-4a58-aed0-4cd5d316e818" containerName="extract-content" Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.227990 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f17aff26-2624-4a58-aed0-4cd5d316e818" containerName="extract-content" Nov 26 11:26:20 crc kubenswrapper[4622]: E1126 11:26:20.228003 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f17aff26-2624-4a58-aed0-4cd5d316e818" containerName="registry-server" Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.228010 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f17aff26-2624-4a58-aed0-4cd5d316e818" containerName="registry-server" Nov 26 11:26:20 crc kubenswrapper[4622]: E1126 11:26:20.228034 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f17aff26-2624-4a58-aed0-4cd5d316e818" containerName="extract-utilities" Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.228040 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f17aff26-2624-4a58-aed0-4cd5d316e818" containerName="extract-utilities" Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.228207 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="f17aff26-2624-4a58-aed0-4cd5d316e818" containerName="registry-server" Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.232316 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.244911 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r44l7"] Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.320649 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqpmx\" (UniqueName: \"kubernetes.io/projected/4c339f88-a427-4951-ae98-3fe33469924b-kube-api-access-nqpmx\") pod \"redhat-operators-r44l7\" (UID: \"4c339f88-a427-4951-ae98-3fe33469924b\") " pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.320711 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c339f88-a427-4951-ae98-3fe33469924b-utilities\") pod \"redhat-operators-r44l7\" (UID: \"4c339f88-a427-4951-ae98-3fe33469924b\") " pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.320772 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c339f88-a427-4951-ae98-3fe33469924b-catalog-content\") pod \"redhat-operators-r44l7\" (UID: \"4c339f88-a427-4951-ae98-3fe33469924b\") " pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.422611 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c339f88-a427-4951-ae98-3fe33469924b-catalog-content\") pod \"redhat-operators-r44l7\" (UID: \"4c339f88-a427-4951-ae98-3fe33469924b\") " pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.422730 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqpmx\" (UniqueName: \"kubernetes.io/projected/4c339f88-a427-4951-ae98-3fe33469924b-kube-api-access-nqpmx\") pod \"redhat-operators-r44l7\" (UID: \"4c339f88-a427-4951-ae98-3fe33469924b\") " pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.422777 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c339f88-a427-4951-ae98-3fe33469924b-utilities\") pod \"redhat-operators-r44l7\" (UID: \"4c339f88-a427-4951-ae98-3fe33469924b\") " pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.423209 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c339f88-a427-4951-ae98-3fe33469924b-utilities\") pod \"redhat-operators-r44l7\" (UID: \"4c339f88-a427-4951-ae98-3fe33469924b\") " pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.423379 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c339f88-a427-4951-ae98-3fe33469924b-catalog-content\") pod \"redhat-operators-r44l7\" (UID: \"4c339f88-a427-4951-ae98-3fe33469924b\") " pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.440854 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-nqpmx\" (UniqueName: \"kubernetes.io/projected/4c339f88-a427-4951-ae98-3fe33469924b-kube-api-access-nqpmx\") pod \"redhat-operators-r44l7\" (UID: \"4c339f88-a427-4951-ae98-3fe33469924b\") " pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:20 crc kubenswrapper[4622]: I1126 11:26:20.558480 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.559044 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-8n2wh" podUID="1fba939c-4058-428f-a359-ea4e031e9fb3" containerName="ovn-controller" probeResult="failure" output=< Nov 26 11:26:21 crc kubenswrapper[4622]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Nov 26 11:26:21 crc kubenswrapper[4622]: > Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.559141 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.568566 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-mj2jt" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.778054 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-8n2wh-config-lnm5k"] Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.779218 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.781238 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.785647 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8n2wh-config-lnm5k"] Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.845832 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-log-ovn\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.845909 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3dc50dca-d916-4af3-b3a1-483ad96f1496-scripts\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.845972 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-run\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.845990 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvm4r\" (UniqueName: \"kubernetes.io/projected/3dc50dca-d916-4af3-b3a1-483ad96f1496-kube-api-access-kvm4r\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " 
pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.846006 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-run-ovn\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.846092 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/3dc50dca-d916-4af3-b3a1-483ad96f1496-additional-scripts\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.947875 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-log-ovn\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.947997 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3dc50dca-d916-4af3-b3a1-483ad96f1496-scripts\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.948090 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-run\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.948131 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvm4r\" (UniqueName: \"kubernetes.io/projected/3dc50dca-d916-4af3-b3a1-483ad96f1496-kube-api-access-kvm4r\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.948149 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-run-ovn\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.948299 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/3dc50dca-d916-4af3-b3a1-483ad96f1496-additional-scripts\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.948449 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-log-ovn\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: 
\"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.948486 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-run-ovn\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.948449 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-run\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.949494 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/3dc50dca-d916-4af3-b3a1-483ad96f1496-additional-scripts\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.950968 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3dc50dca-d916-4af3-b3a1-483ad96f1496-scripts\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:21 crc kubenswrapper[4622]: I1126 11:26:21.976795 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvm4r\" (UniqueName: \"kubernetes.io/projected/3dc50dca-d916-4af3-b3a1-483ad96f1496-kube-api-access-kvm4r\") pod \"ovn-controller-8n2wh-config-lnm5k\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:22 crc kubenswrapper[4622]: I1126 11:26:22.101163 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:23 crc kubenswrapper[4622]: I1126 11:26:23.145148 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zfq2z" event={"ID":"02f6d949-d899-4367-9648-006b5fdb26e4","Type":"ContainerDied","Data":"0abe3f19dc74a04cd46c46840caa9b6bf5e49410037ac503b881583d858b7f1d"} Nov 26 11:26:23 crc kubenswrapper[4622]: I1126 11:26:23.145631 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0abe3f19dc74a04cd46c46840caa9b6bf5e49410037ac503b881583d858b7f1d" Nov 26 11:26:23 crc kubenswrapper[4622]: I1126 11:26:23.148818 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:23 crc kubenswrapper[4622]: I1126 11:26:23.168578 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02f6d949-d899-4367-9648-006b5fdb26e4-utilities\") pod \"02f6d949-d899-4367-9648-006b5fdb26e4\" (UID: \"02f6d949-d899-4367-9648-006b5fdb26e4\") " Nov 26 11:26:23 crc kubenswrapper[4622]: I1126 11:26:23.168674 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02f6d949-d899-4367-9648-006b5fdb26e4-catalog-content\") pod \"02f6d949-d899-4367-9648-006b5fdb26e4\" (UID: \"02f6d949-d899-4367-9648-006b5fdb26e4\") " Nov 26 11:26:23 crc kubenswrapper[4622]: I1126 11:26:23.168726 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5j4fx\" (UniqueName: \"kubernetes.io/projected/02f6d949-d899-4367-9648-006b5fdb26e4-kube-api-access-5j4fx\") pod \"02f6d949-d899-4367-9648-006b5fdb26e4\" (UID: \"02f6d949-d899-4367-9648-006b5fdb26e4\") " Nov 26 11:26:23 crc kubenswrapper[4622]: I1126 11:26:23.169439 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02f6d949-d899-4367-9648-006b5fdb26e4-utilities" (OuterVolumeSpecName: "utilities") pod "02f6d949-d899-4367-9648-006b5fdb26e4" (UID: "02f6d949-d899-4367-9648-006b5fdb26e4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:26:23 crc kubenswrapper[4622]: I1126 11:26:23.178771 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02f6d949-d899-4367-9648-006b5fdb26e4-kube-api-access-5j4fx" (OuterVolumeSpecName: "kube-api-access-5j4fx") pod "02f6d949-d899-4367-9648-006b5fdb26e4" (UID: "02f6d949-d899-4367-9648-006b5fdb26e4"). InnerVolumeSpecName "kube-api-access-5j4fx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:23 crc kubenswrapper[4622]: I1126 11:26:23.213664 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02f6d949-d899-4367-9648-006b5fdb26e4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "02f6d949-d899-4367-9648-006b5fdb26e4" (UID: "02f6d949-d899-4367-9648-006b5fdb26e4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:26:23 crc kubenswrapper[4622]: I1126 11:26:23.272695 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02f6d949-d899-4367-9648-006b5fdb26e4-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:23 crc kubenswrapper[4622]: I1126 11:26:23.272727 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5j4fx\" (UniqueName: \"kubernetes.io/projected/02f6d949-d899-4367-9648-006b5fdb26e4-kube-api-access-5j4fx\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:23 crc kubenswrapper[4622]: I1126 11:26:23.272738 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02f6d949-d899-4367-9648-006b5fdb26e4-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:23 crc kubenswrapper[4622]: I1126 11:26:23.275353 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8n2wh-config-lnm5k"] Nov 26 11:26:23 crc kubenswrapper[4622]: W1126 11:26:23.282642 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c339f88_a427_4951_ae98_3fe33469924b.slice/crio-b5130630e26ae4bbe092b44443632d79aa67649eedc9ab8b92c5b7aaf89c66de WatchSource:0}: Error finding container b5130630e26ae4bbe092b44443632d79aa67649eedc9ab8b92c5b7aaf89c66de: Status 404 returned error can't find the container with id b5130630e26ae4bbe092b44443632d79aa67649eedc9ab8b92c5b7aaf89c66de Nov 26 11:26:23 crc kubenswrapper[4622]: I1126 11:26:23.284290 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r44l7"] Nov 26 11:26:24 crc kubenswrapper[4622]: I1126 11:26:24.153395 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hjhvx" event={"ID":"8bbaef1d-2eba-4b11-a853-97b7af40b7e7","Type":"ContainerStarted","Data":"dc70b9e7db144cb00031158f11022f52078086c017a84a09c2bf0471931c5104"} Nov 26 11:26:24 crc kubenswrapper[4622]: I1126 11:26:24.155011 4622 generic.go:334] "Generic (PLEG): container finished" podID="3dc50dca-d916-4af3-b3a1-483ad96f1496" containerID="52d726569743b69d2469fd106d8b307ba882c9c7a5598c07db3c7de049deffb4" exitCode=0 Nov 26 11:26:24 crc kubenswrapper[4622]: I1126 11:26:24.155181 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8n2wh-config-lnm5k" event={"ID":"3dc50dca-d916-4af3-b3a1-483ad96f1496","Type":"ContainerDied","Data":"52d726569743b69d2469fd106d8b307ba882c9c7a5598c07db3c7de049deffb4"} Nov 26 11:26:24 crc kubenswrapper[4622]: I1126 11:26:24.155218 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8n2wh-config-lnm5k" event={"ID":"3dc50dca-d916-4af3-b3a1-483ad96f1496","Type":"ContainerStarted","Data":"4a2c6d4f855344c172d705b132c4f63b353a98508301befda4a0e7dcd01d256f"} Nov 26 11:26:24 crc kubenswrapper[4622]: I1126 11:26:24.156826 4622 generic.go:334] "Generic (PLEG): container finished" podID="4c339f88-a427-4951-ae98-3fe33469924b" containerID="eaf5a80b758aeed16854c20ca0661cb3202c5a9fbf20f8f7eaba325e5a63657f" exitCode=0 Nov 26 11:26:24 crc kubenswrapper[4622]: I1126 11:26:24.156882 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r44l7" event={"ID":"4c339f88-a427-4951-ae98-3fe33469924b","Type":"ContainerDied","Data":"eaf5a80b758aeed16854c20ca0661cb3202c5a9fbf20f8f7eaba325e5a63657f"} Nov 26 11:26:24 crc 
kubenswrapper[4622]: I1126 11:26:24.156902 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r44l7" event={"ID":"4c339f88-a427-4951-ae98-3fe33469924b","Type":"ContainerStarted","Data":"b5130630e26ae4bbe092b44443632d79aa67649eedc9ab8b92c5b7aaf89c66de"} Nov 26 11:26:24 crc kubenswrapper[4622]: I1126 11:26:24.156990 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zfq2z" Nov 26 11:26:24 crc kubenswrapper[4622]: I1126 11:26:24.185002 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-hjhvx" podStartSLOduration=2.239798663 podStartE2EDuration="13.184985646s" podCreationTimestamp="2025-11-26 11:26:11 +0000 UTC" firstStartedPulling="2025-11-26 11:26:11.953401315 +0000 UTC m=+931.544612837" lastFinishedPulling="2025-11-26 11:26:22.898588298 +0000 UTC m=+942.489799820" observedRunningTime="2025-11-26 11:26:24.179742051 +0000 UTC m=+943.770953573" watchObservedRunningTime="2025-11-26 11:26:24.184985646 +0000 UTC m=+943.776197167" Nov 26 11:26:24 crc kubenswrapper[4622]: I1126 11:26:24.197778 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zfq2z"] Nov 26 11:26:24 crc kubenswrapper[4622]: I1126 11:26:24.204759 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zfq2z"] Nov 26 11:26:24 crc kubenswrapper[4622]: I1126 11:26:24.715052 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02f6d949-d899-4367-9648-006b5fdb26e4" path="/var/lib/kubelet/pods/02f6d949-d899-4367-9648-006b5fdb26e4/volumes" Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.166162 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r44l7" event={"ID":"4c339f88-a427-4951-ae98-3fe33469924b","Type":"ContainerStarted","Data":"97be94336e042df08a34ff8fdfd755d8744de662b262291d72163feffa610ff1"} Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.492179 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.617646 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvm4r\" (UniqueName: \"kubernetes.io/projected/3dc50dca-d916-4af3-b3a1-483ad96f1496-kube-api-access-kvm4r\") pod \"3dc50dca-d916-4af3-b3a1-483ad96f1496\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.617717 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3dc50dca-d916-4af3-b3a1-483ad96f1496-scripts\") pod \"3dc50dca-d916-4af3-b3a1-483ad96f1496\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.617811 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-log-ovn\") pod \"3dc50dca-d916-4af3-b3a1-483ad96f1496\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.617918 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-run-ovn\") pod \"3dc50dca-d916-4af3-b3a1-483ad96f1496\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.617979 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-run\") pod \"3dc50dca-d916-4af3-b3a1-483ad96f1496\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.618011 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/3dc50dca-d916-4af3-b3a1-483ad96f1496-additional-scripts\") pod \"3dc50dca-d916-4af3-b3a1-483ad96f1496\" (UID: \"3dc50dca-d916-4af3-b3a1-483ad96f1496\") " Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.618152 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "3dc50dca-d916-4af3-b3a1-483ad96f1496" (UID: "3dc50dca-d916-4af3-b3a1-483ad96f1496"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.618638 4622 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.619288 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-run" (OuterVolumeSpecName: "var-run") pod "3dc50dca-d916-4af3-b3a1-483ad96f1496" (UID: "3dc50dca-d916-4af3-b3a1-483ad96f1496"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.619328 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dc50dca-d916-4af3-b3a1-483ad96f1496-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "3dc50dca-d916-4af3-b3a1-483ad96f1496" (UID: "3dc50dca-d916-4af3-b3a1-483ad96f1496"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.619658 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "3dc50dca-d916-4af3-b3a1-483ad96f1496" (UID: "3dc50dca-d916-4af3-b3a1-483ad96f1496"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.619978 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dc50dca-d916-4af3-b3a1-483ad96f1496-scripts" (OuterVolumeSpecName: "scripts") pod "3dc50dca-d916-4af3-b3a1-483ad96f1496" (UID: "3dc50dca-d916-4af3-b3a1-483ad96f1496"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.631165 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dc50dca-d916-4af3-b3a1-483ad96f1496-kube-api-access-kvm4r" (OuterVolumeSpecName: "kube-api-access-kvm4r") pod "3dc50dca-d916-4af3-b3a1-483ad96f1496" (UID: "3dc50dca-d916-4af3-b3a1-483ad96f1496"). InnerVolumeSpecName "kube-api-access-kvm4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.719666 4622 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-run\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.719693 4622 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/3dc50dca-d916-4af3-b3a1-483ad96f1496-additional-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.719708 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvm4r\" (UniqueName: \"kubernetes.io/projected/3dc50dca-d916-4af3-b3a1-483ad96f1496-kube-api-access-kvm4r\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.719718 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3dc50dca-d916-4af3-b3a1-483ad96f1496-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:25 crc kubenswrapper[4622]: I1126 11:26:25.719725 4622 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3dc50dca-d916-4af3-b3a1-483ad96f1496-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.178032 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8n2wh-config-lnm5k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.178411 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8n2wh-config-lnm5k" event={"ID":"3dc50dca-d916-4af3-b3a1-483ad96f1496","Type":"ContainerDied","Data":"4a2c6d4f855344c172d705b132c4f63b353a98508301befda4a0e7dcd01d256f"} Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.178482 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a2c6d4f855344c172d705b132c4f63b353a98508301befda4a0e7dcd01d256f" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.180372 4622 generic.go:334] "Generic (PLEG): container finished" podID="4c339f88-a427-4951-ae98-3fe33469924b" containerID="97be94336e042df08a34ff8fdfd755d8744de662b262291d72163feffa610ff1" exitCode=0 Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.180409 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r44l7" event={"ID":"4c339f88-a427-4951-ae98-3fe33469924b","Type":"ContainerDied","Data":"97be94336e042df08a34ff8fdfd755d8744de662b262291d72163feffa610ff1"} Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.552287 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-8n2wh" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.571895 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-8n2wh-config-lnm5k"] Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.577889 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-8n2wh-config-lnm5k"] Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.713189 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dc50dca-d916-4af3-b3a1-483ad96f1496" path="/var/lib/kubelet/pods/3dc50dca-d916-4af3-b3a1-483ad96f1496/volumes" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.718448 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-8n2wh-config-k2n6k"] Nov 26 11:26:26 crc kubenswrapper[4622]: E1126 11:26:26.718769 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02f6d949-d899-4367-9648-006b5fdb26e4" containerName="registry-server" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.718787 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="02f6d949-d899-4367-9648-006b5fdb26e4" containerName="registry-server" Nov 26 11:26:26 crc kubenswrapper[4622]: E1126 11:26:26.718798 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02f6d949-d899-4367-9648-006b5fdb26e4" containerName="extract-utilities" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.718805 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="02f6d949-d899-4367-9648-006b5fdb26e4" containerName="extract-utilities" Nov 26 11:26:26 crc kubenswrapper[4622]: E1126 11:26:26.718823 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dc50dca-d916-4af3-b3a1-483ad96f1496" containerName="ovn-config" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.718830 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dc50dca-d916-4af3-b3a1-483ad96f1496" containerName="ovn-config" Nov 26 11:26:26 crc kubenswrapper[4622]: E1126 11:26:26.718839 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02f6d949-d899-4367-9648-006b5fdb26e4" containerName="extract-content" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 
11:26:26.718844 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="02f6d949-d899-4367-9648-006b5fdb26e4" containerName="extract-content" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.719005 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="02f6d949-d899-4367-9648-006b5fdb26e4" containerName="registry-server" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.719020 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dc50dca-d916-4af3-b3a1-483ad96f1496" containerName="ovn-config" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.719531 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.723065 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.746771 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8n2wh-config-k2n6k"] Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.843852 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c52e23a-ed9f-440a-9500-b73d548e9088-scripts\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.843917 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-log-ovn\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.844032 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-run-ovn\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.844123 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-run\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.844287 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bk4m\" (UniqueName: \"kubernetes.io/projected/7c52e23a-ed9f-440a-9500-b73d548e9088-kube-api-access-6bk4m\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.844331 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7c52e23a-ed9f-440a-9500-b73d548e9088-additional-scripts\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " 
pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.946247 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bk4m\" (UniqueName: \"kubernetes.io/projected/7c52e23a-ed9f-440a-9500-b73d548e9088-kube-api-access-6bk4m\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.946301 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7c52e23a-ed9f-440a-9500-b73d548e9088-additional-scripts\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.946450 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c52e23a-ed9f-440a-9500-b73d548e9088-scripts\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.946514 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-log-ovn\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.946547 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-run-ovn\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.946585 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-run\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.946758 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-run\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.946780 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-log-ovn\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.946810 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-run-ovn\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " 
pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.947684 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7c52e23a-ed9f-440a-9500-b73d548e9088-additional-scripts\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.948469 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c52e23a-ed9f-440a-9500-b73d548e9088-scripts\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:26 crc kubenswrapper[4622]: I1126 11:26:26.963833 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bk4m\" (UniqueName: \"kubernetes.io/projected/7c52e23a-ed9f-440a-9500-b73d548e9088-kube-api-access-6bk4m\") pod \"ovn-controller-8n2wh-config-k2n6k\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.032638 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.132698 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.195725 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r44l7" event={"ID":"4c339f88-a427-4951-ae98-3fe33469924b","Type":"ContainerStarted","Data":"173617202ec041b2a2e516a32e3e6d30336845b726e0f3586e26d933c2a299bd"} Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.213998 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-r44l7" podStartSLOduration=4.533463601 podStartE2EDuration="7.213982302s" podCreationTimestamp="2025-11-26 11:26:20 +0000 UTC" firstStartedPulling="2025-11-26 11:26:24.15835085 +0000 UTC m=+943.749562372" lastFinishedPulling="2025-11-26 11:26:26.838869551 +0000 UTC m=+946.430081073" observedRunningTime="2025-11-26 11:26:27.212478084 +0000 UTC m=+946.803689607" watchObservedRunningTime="2025-11-26 11:26:27.213982302 +0000 UTC m=+946.805193824" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.403289 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-dlkff"] Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.404328 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-dlkff" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.412692 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.413973 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-dlkff"] Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.455009 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/690c5cab-acaa-440f-803e-5c8ae6705486-operator-scripts\") pod \"cinder-db-create-dlkff\" (UID: \"690c5cab-acaa-440f-803e-5c8ae6705486\") " pod="openstack/cinder-db-create-dlkff" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.455299 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7ss7\" (UniqueName: \"kubernetes.io/projected/690c5cab-acaa-440f-803e-5c8ae6705486-kube-api-access-t7ss7\") pod \"cinder-db-create-dlkff\" (UID: \"690c5cab-acaa-440f-803e-5c8ae6705486\") " pod="openstack/cinder-db-create-dlkff" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.465565 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8n2wh-config-k2n6k"] Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.518529 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-6k9fh"] Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.519568 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6k9fh" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.552398 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-20d7-account-create-update-x2dsx"] Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.553455 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-20d7-account-create-update-x2dsx" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.556776 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b537dfd7-4088-4292-bf23-aedf0e2bd0df-operator-scripts\") pod \"barbican-db-create-6k9fh\" (UID: \"b537dfd7-4088-4292-bf23-aedf0e2bd0df\") " pod="openstack/barbican-db-create-6k9fh" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.556944 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/690c5cab-acaa-440f-803e-5c8ae6705486-operator-scripts\") pod \"cinder-db-create-dlkff\" (UID: \"690c5cab-acaa-440f-803e-5c8ae6705486\") " pod="openstack/cinder-db-create-dlkff" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.556971 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7ss7\" (UniqueName: \"kubernetes.io/projected/690c5cab-acaa-440f-803e-5c8ae6705486-kube-api-access-t7ss7\") pod \"cinder-db-create-dlkff\" (UID: \"690c5cab-acaa-440f-803e-5c8ae6705486\") " pod="openstack/cinder-db-create-dlkff" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.557138 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkwsm\" (UniqueName: \"kubernetes.io/projected/b537dfd7-4088-4292-bf23-aedf0e2bd0df-kube-api-access-bkwsm\") pod \"barbican-db-create-6k9fh\" (UID: \"b537dfd7-4088-4292-bf23-aedf0e2bd0df\") " pod="openstack/barbican-db-create-6k9fh" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.563018 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/690c5cab-acaa-440f-803e-5c8ae6705486-operator-scripts\") pod \"cinder-db-create-dlkff\" (UID: \"690c5cab-acaa-440f-803e-5c8ae6705486\") " pod="openstack/cinder-db-create-dlkff" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.564220 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-6k9fh"] Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.571230 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.600667 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7ss7\" (UniqueName: \"kubernetes.io/projected/690c5cab-acaa-440f-803e-5c8ae6705486-kube-api-access-t7ss7\") pod \"cinder-db-create-dlkff\" (UID: \"690c5cab-acaa-440f-803e-5c8ae6705486\") " pod="openstack/cinder-db-create-dlkff" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.602852 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-20d7-account-create-update-x2dsx"] Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.629142 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-aa81-account-create-update-hlh9d"] Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.630664 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-aa81-account-create-update-hlh9d" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.633750 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.643734 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-aa81-account-create-update-hlh9d"] Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.659494 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1058139e-d30c-419c-9819-91c5418144fa-operator-scripts\") pod \"barbican-aa81-account-create-update-hlh9d\" (UID: \"1058139e-d30c-419c-9819-91c5418144fa\") " pod="openstack/barbican-aa81-account-create-update-hlh9d" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.659555 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrpzh\" (UniqueName: \"kubernetes.io/projected/a87d2790-eea1-4fb5-8f6c-fb4931e66b28-kube-api-access-xrpzh\") pod \"cinder-20d7-account-create-update-x2dsx\" (UID: \"a87d2790-eea1-4fb5-8f6c-fb4931e66b28\") " pod="openstack/cinder-20d7-account-create-update-x2dsx" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.659591 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkwsm\" (UniqueName: \"kubernetes.io/projected/b537dfd7-4088-4292-bf23-aedf0e2bd0df-kube-api-access-bkwsm\") pod \"barbican-db-create-6k9fh\" (UID: \"b537dfd7-4088-4292-bf23-aedf0e2bd0df\") " pod="openstack/barbican-db-create-6k9fh" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.659660 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b537dfd7-4088-4292-bf23-aedf0e2bd0df-operator-scripts\") pod \"barbican-db-create-6k9fh\" (UID: \"b537dfd7-4088-4292-bf23-aedf0e2bd0df\") " pod="openstack/barbican-db-create-6k9fh" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.659747 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88q9h\" (UniqueName: \"kubernetes.io/projected/1058139e-d30c-419c-9819-91c5418144fa-kube-api-access-88q9h\") pod \"barbican-aa81-account-create-update-hlh9d\" (UID: \"1058139e-d30c-419c-9819-91c5418144fa\") " pod="openstack/barbican-aa81-account-create-update-hlh9d" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.659791 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a87d2790-eea1-4fb5-8f6c-fb4931e66b28-operator-scripts\") pod \"cinder-20d7-account-create-update-x2dsx\" (UID: \"a87d2790-eea1-4fb5-8f6c-fb4931e66b28\") " pod="openstack/cinder-20d7-account-create-update-x2dsx" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.660267 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b537dfd7-4088-4292-bf23-aedf0e2bd0df-operator-scripts\") pod \"barbican-db-create-6k9fh\" (UID: \"b537dfd7-4088-4292-bf23-aedf0e2bd0df\") " pod="openstack/barbican-db-create-6k9fh" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.674274 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkwsm\" (UniqueName: 
\"kubernetes.io/projected/b537dfd7-4088-4292-bf23-aedf0e2bd0df-kube-api-access-bkwsm\") pod \"barbican-db-create-6k9fh\" (UID: \"b537dfd7-4088-4292-bf23-aedf0e2bd0df\") " pod="openstack/barbican-db-create-6k9fh" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.720990 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-dlkff" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.721689 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-4dj9p"] Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.722732 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-4dj9p" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.728921 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-4dj9p"] Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.747568 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-mwptb"] Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.748560 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-mwptb" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.752088 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.752352 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.752480 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gr2p8" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.752617 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.761773 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-mwptb"] Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.762177 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88q9h\" (UniqueName: \"kubernetes.io/projected/1058139e-d30c-419c-9819-91c5418144fa-kube-api-access-88q9h\") pod \"barbican-aa81-account-create-update-hlh9d\" (UID: \"1058139e-d30c-419c-9819-91c5418144fa\") " pod="openstack/barbican-aa81-account-create-update-hlh9d" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.762231 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a87d2790-eea1-4fb5-8f6c-fb4931e66b28-operator-scripts\") pod \"cinder-20d7-account-create-update-x2dsx\" (UID: \"a87d2790-eea1-4fb5-8f6c-fb4931e66b28\") " pod="openstack/cinder-20d7-account-create-update-x2dsx" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.762324 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr5ps\" (UniqueName: \"kubernetes.io/projected/d731d848-aaea-425b-be07-5e852d58a45d-kube-api-access-zr5ps\") pod \"neutron-db-create-4dj9p\" (UID: \"d731d848-aaea-425b-be07-5e852d58a45d\") " pod="openstack/neutron-db-create-4dj9p" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.762364 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1058139e-d30c-419c-9819-91c5418144fa-operator-scripts\") pod 
\"barbican-aa81-account-create-update-hlh9d\" (UID: \"1058139e-d30c-419c-9819-91c5418144fa\") " pod="openstack/barbican-aa81-account-create-update-hlh9d" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.762385 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d731d848-aaea-425b-be07-5e852d58a45d-operator-scripts\") pod \"neutron-db-create-4dj9p\" (UID: \"d731d848-aaea-425b-be07-5e852d58a45d\") " pod="openstack/neutron-db-create-4dj9p" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.762401 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrpzh\" (UniqueName: \"kubernetes.io/projected/a87d2790-eea1-4fb5-8f6c-fb4931e66b28-kube-api-access-xrpzh\") pod \"cinder-20d7-account-create-update-x2dsx\" (UID: \"a87d2790-eea1-4fb5-8f6c-fb4931e66b28\") " pod="openstack/cinder-20d7-account-create-update-x2dsx" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.763865 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a87d2790-eea1-4fb5-8f6c-fb4931e66b28-operator-scripts\") pod \"cinder-20d7-account-create-update-x2dsx\" (UID: \"a87d2790-eea1-4fb5-8f6c-fb4931e66b28\") " pod="openstack/cinder-20d7-account-create-update-x2dsx" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.766969 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1058139e-d30c-419c-9819-91c5418144fa-operator-scripts\") pod \"barbican-aa81-account-create-update-hlh9d\" (UID: \"1058139e-d30c-419c-9819-91c5418144fa\") " pod="openstack/barbican-aa81-account-create-update-hlh9d" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.779913 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88q9h\" (UniqueName: \"kubernetes.io/projected/1058139e-d30c-419c-9819-91c5418144fa-kube-api-access-88q9h\") pod \"barbican-aa81-account-create-update-hlh9d\" (UID: \"1058139e-d30c-419c-9819-91c5418144fa\") " pod="openstack/barbican-aa81-account-create-update-hlh9d" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.783860 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrpzh\" (UniqueName: \"kubernetes.io/projected/a87d2790-eea1-4fb5-8f6c-fb4931e66b28-kube-api-access-xrpzh\") pod \"cinder-20d7-account-create-update-x2dsx\" (UID: \"a87d2790-eea1-4fb5-8f6c-fb4931e66b28\") " pod="openstack/cinder-20d7-account-create-update-x2dsx" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.811274 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-4d48-account-create-update-bmtqm"] Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.812456 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-4d48-account-create-update-bmtqm" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.816911 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.819520 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-4d48-account-create-update-bmtqm"] Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.841180 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-6k9fh" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.863952 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c071132-6163-40db-92b0-121c2d362a69-operator-scripts\") pod \"neutron-4d48-account-create-update-bmtqm\" (UID: \"2c071132-6163-40db-92b0-121c2d362a69\") " pod="openstack/neutron-4d48-account-create-update-bmtqm" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.864014 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1451a45-9875-4a5e-b265-bd0c240cb628-config-data\") pod \"keystone-db-sync-mwptb\" (UID: \"d1451a45-9875-4a5e-b265-bd0c240cb628\") " pod="openstack/keystone-db-sync-mwptb" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.864046 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8pq6\" (UniqueName: \"kubernetes.io/projected/2c071132-6163-40db-92b0-121c2d362a69-kube-api-access-c8pq6\") pod \"neutron-4d48-account-create-update-bmtqm\" (UID: \"2c071132-6163-40db-92b0-121c2d362a69\") " pod="openstack/neutron-4d48-account-create-update-bmtqm" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.864067 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnb5h\" (UniqueName: \"kubernetes.io/projected/d1451a45-9875-4a5e-b265-bd0c240cb628-kube-api-access-lnb5h\") pod \"keystone-db-sync-mwptb\" (UID: \"d1451a45-9875-4a5e-b265-bd0c240cb628\") " pod="openstack/keystone-db-sync-mwptb" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.864111 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr5ps\" (UniqueName: \"kubernetes.io/projected/d731d848-aaea-425b-be07-5e852d58a45d-kube-api-access-zr5ps\") pod \"neutron-db-create-4dj9p\" (UID: \"d731d848-aaea-425b-be07-5e852d58a45d\") " pod="openstack/neutron-db-create-4dj9p" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.864145 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d731d848-aaea-425b-be07-5e852d58a45d-operator-scripts\") pod \"neutron-db-create-4dj9p\" (UID: \"d731d848-aaea-425b-be07-5e852d58a45d\") " pod="openstack/neutron-db-create-4dj9p" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.864177 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1451a45-9875-4a5e-b265-bd0c240cb628-combined-ca-bundle\") pod \"keystone-db-sync-mwptb\" (UID: \"d1451a45-9875-4a5e-b265-bd0c240cb628\") " pod="openstack/keystone-db-sync-mwptb" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.865045 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d731d848-aaea-425b-be07-5e852d58a45d-operator-scripts\") pod \"neutron-db-create-4dj9p\" (UID: \"d731d848-aaea-425b-be07-5e852d58a45d\") " pod="openstack/neutron-db-create-4dj9p" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.892966 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr5ps\" (UniqueName: 
\"kubernetes.io/projected/d731d848-aaea-425b-be07-5e852d58a45d-kube-api-access-zr5ps\") pod \"neutron-db-create-4dj9p\" (UID: \"d731d848-aaea-425b-be07-5e852d58a45d\") " pod="openstack/neutron-db-create-4dj9p" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.905959 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-20d7-account-create-update-x2dsx" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.954039 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-aa81-account-create-update-hlh9d" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.966275 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1451a45-9875-4a5e-b265-bd0c240cb628-config-data\") pod \"keystone-db-sync-mwptb\" (UID: \"d1451a45-9875-4a5e-b265-bd0c240cb628\") " pod="openstack/keystone-db-sync-mwptb" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.966324 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8pq6\" (UniqueName: \"kubernetes.io/projected/2c071132-6163-40db-92b0-121c2d362a69-kube-api-access-c8pq6\") pod \"neutron-4d48-account-create-update-bmtqm\" (UID: \"2c071132-6163-40db-92b0-121c2d362a69\") " pod="openstack/neutron-4d48-account-create-update-bmtqm" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.966350 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnb5h\" (UniqueName: \"kubernetes.io/projected/d1451a45-9875-4a5e-b265-bd0c240cb628-kube-api-access-lnb5h\") pod \"keystone-db-sync-mwptb\" (UID: \"d1451a45-9875-4a5e-b265-bd0c240cb628\") " pod="openstack/keystone-db-sync-mwptb" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.966403 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1451a45-9875-4a5e-b265-bd0c240cb628-combined-ca-bundle\") pod \"keystone-db-sync-mwptb\" (UID: \"d1451a45-9875-4a5e-b265-bd0c240cb628\") " pod="openstack/keystone-db-sync-mwptb" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.966477 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c071132-6163-40db-92b0-121c2d362a69-operator-scripts\") pod \"neutron-4d48-account-create-update-bmtqm\" (UID: \"2c071132-6163-40db-92b0-121c2d362a69\") " pod="openstack/neutron-4d48-account-create-update-bmtqm" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.967048 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c071132-6163-40db-92b0-121c2d362a69-operator-scripts\") pod \"neutron-4d48-account-create-update-bmtqm\" (UID: \"2c071132-6163-40db-92b0-121c2d362a69\") " pod="openstack/neutron-4d48-account-create-update-bmtqm" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.970193 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1451a45-9875-4a5e-b265-bd0c240cb628-config-data\") pod \"keystone-db-sync-mwptb\" (UID: \"d1451a45-9875-4a5e-b265-bd0c240cb628\") " pod="openstack/keystone-db-sync-mwptb" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.970907 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d1451a45-9875-4a5e-b265-bd0c240cb628-combined-ca-bundle\") pod \"keystone-db-sync-mwptb\" (UID: \"d1451a45-9875-4a5e-b265-bd0c240cb628\") " pod="openstack/keystone-db-sync-mwptb" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.984683 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8pq6\" (UniqueName: \"kubernetes.io/projected/2c071132-6163-40db-92b0-121c2d362a69-kube-api-access-c8pq6\") pod \"neutron-4d48-account-create-update-bmtqm\" (UID: \"2c071132-6163-40db-92b0-121c2d362a69\") " pod="openstack/neutron-4d48-account-create-update-bmtqm" Nov 26 11:26:27 crc kubenswrapper[4622]: I1126 11:26:27.989456 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnb5h\" (UniqueName: \"kubernetes.io/projected/d1451a45-9875-4a5e-b265-bd0c240cb628-kube-api-access-lnb5h\") pod \"keystone-db-sync-mwptb\" (UID: \"d1451a45-9875-4a5e-b265-bd0c240cb628\") " pod="openstack/keystone-db-sync-mwptb" Nov 26 11:26:28 crc kubenswrapper[4622]: I1126 11:26:28.044729 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-4dj9p" Nov 26 11:26:28 crc kubenswrapper[4622]: I1126 11:26:28.070628 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-mwptb" Nov 26 11:26:28 crc kubenswrapper[4622]: I1126 11:26:28.130464 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-4d48-account-create-update-bmtqm" Nov 26 11:26:28 crc kubenswrapper[4622]: I1126 11:26:28.248246 4622 generic.go:334] "Generic (PLEG): container finished" podID="7c52e23a-ed9f-440a-9500-b73d548e9088" containerID="e68baed220f32ac667d7db1f9ad14c735c05432101aca2601759f038799f6635" exitCode=0 Nov 26 11:26:28 crc kubenswrapper[4622]: I1126 11:26:28.248596 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8n2wh-config-k2n6k" event={"ID":"7c52e23a-ed9f-440a-9500-b73d548e9088","Type":"ContainerDied","Data":"e68baed220f32ac667d7db1f9ad14c735c05432101aca2601759f038799f6635"} Nov 26 11:26:28 crc kubenswrapper[4622]: I1126 11:26:28.248629 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8n2wh-config-k2n6k" event={"ID":"7c52e23a-ed9f-440a-9500-b73d548e9088","Type":"ContainerStarted","Data":"bd5567e53b83715255f390de8d33c1046d2e95f2c9cc8e55201f438ef513fda3"} Nov 26 11:26:28 crc kubenswrapper[4622]: I1126 11:26:28.261131 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-dlkff"] Nov 26 11:26:28 crc kubenswrapper[4622]: I1126 11:26:28.266989 4622 generic.go:334] "Generic (PLEG): container finished" podID="8bbaef1d-2eba-4b11-a853-97b7af40b7e7" containerID="dc70b9e7db144cb00031158f11022f52078086c017a84a09c2bf0471931c5104" exitCode=0 Nov 26 11:26:28 crc kubenswrapper[4622]: I1126 11:26:28.267289 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hjhvx" event={"ID":"8bbaef1d-2eba-4b11-a853-97b7af40b7e7","Type":"ContainerDied","Data":"dc70b9e7db144cb00031158f11022f52078086c017a84a09c2bf0471931c5104"} Nov 26 11:26:28 crc kubenswrapper[4622]: I1126 11:26:28.384978 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-6k9fh"] Nov 26 11:26:28 crc kubenswrapper[4622]: W1126 11:26:28.391623 4622 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb537dfd7_4088_4292_bf23_aedf0e2bd0df.slice/crio-82f7b29a53b5ba3c84820a45550975adfbdd071b82e47d6468b6ce8211d2eaac WatchSource:0}: Error finding container 82f7b29a53b5ba3c84820a45550975adfbdd071b82e47d6468b6ce8211d2eaac: Status 404 returned error can't find the container with id 82f7b29a53b5ba3c84820a45550975adfbdd071b82e47d6468b6ce8211d2eaac Nov 26 11:26:28 crc kubenswrapper[4622]: I1126 11:26:28.445321 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-20d7-account-create-update-x2dsx"] Nov 26 11:26:28 crc kubenswrapper[4622]: I1126 11:26:28.541660 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-aa81-account-create-update-hlh9d"] Nov 26 11:26:28 crc kubenswrapper[4622]: W1126 11:26:28.555862 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1058139e_d30c_419c_9819_91c5418144fa.slice/crio-2e6a1fcc9495bce4340047b0884599e572e026d10b83ed93b8c2058761bf4f3d WatchSource:0}: Error finding container 2e6a1fcc9495bce4340047b0884599e572e026d10b83ed93b8c2058761bf4f3d: Status 404 returned error can't find the container with id 2e6a1fcc9495bce4340047b0884599e572e026d10b83ed93b8c2058761bf4f3d Nov 26 11:26:28 crc kubenswrapper[4622]: I1126 11:26:28.621911 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-4dj9p"] Nov 26 11:26:28 crc kubenswrapper[4622]: I1126 11:26:28.695994 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-4d48-account-create-update-bmtqm"] Nov 26 11:26:28 crc kubenswrapper[4622]: W1126 11:26:28.702626 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c071132_6163_40db_92b0_121c2d362a69.slice/crio-d523d6ae2cbac49460a49c64a01de38d8b64f77f7d86eeb0d4129a47efe4e7ad WatchSource:0}: Error finding container d523d6ae2cbac49460a49c64a01de38d8b64f77f7d86eeb0d4129a47efe4e7ad: Status 404 returned error can't find the container with id d523d6ae2cbac49460a49c64a01de38d8b64f77f7d86eeb0d4129a47efe4e7ad Nov 26 11:26:28 crc kubenswrapper[4622]: I1126 11:26:28.729882 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-mwptb"] Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.324081 4622 generic.go:334] "Generic (PLEG): container finished" podID="a87d2790-eea1-4fb5-8f6c-fb4931e66b28" containerID="d2d11cca6f9226bb774372212b8c86d6279ddf6b04da7c4946f55d5d7089a8cf" exitCode=0 Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.324360 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-20d7-account-create-update-x2dsx" event={"ID":"a87d2790-eea1-4fb5-8f6c-fb4931e66b28","Type":"ContainerDied","Data":"d2d11cca6f9226bb774372212b8c86d6279ddf6b04da7c4946f55d5d7089a8cf"} Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.324392 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-20d7-account-create-update-x2dsx" event={"ID":"a87d2790-eea1-4fb5-8f6c-fb4931e66b28","Type":"ContainerStarted","Data":"dc80cb4f7bb31575558936adf3c7c0d6a82ebd9d52ba1a3efa4ad858512bbc16"} Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.357701 4622 generic.go:334] "Generic (PLEG): container finished" podID="d731d848-aaea-425b-be07-5e852d58a45d" containerID="f703df987afdc852d4ad06b2a0847989c84279ed13ea7f9c15d5872ccab10ab1" exitCode=0 Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 
11:26:29.357782 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-4dj9p" event={"ID":"d731d848-aaea-425b-be07-5e852d58a45d","Type":"ContainerDied","Data":"f703df987afdc852d4ad06b2a0847989c84279ed13ea7f9c15d5872ccab10ab1"} Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.357807 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-4dj9p" event={"ID":"d731d848-aaea-425b-be07-5e852d58a45d","Type":"ContainerStarted","Data":"be43a6f665df05db735aca46b2566fcafb3bf4d8cb17769167de6111b74d216f"} Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.386730 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mwptb" event={"ID":"d1451a45-9875-4a5e-b265-bd0c240cb628","Type":"ContainerStarted","Data":"9fa73c07a223360cec3c467f001d023169ea953461bac705bf0726fbc3a6dc94"} Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.419710 4622 generic.go:334] "Generic (PLEG): container finished" podID="1058139e-d30c-419c-9819-91c5418144fa" containerID="56e79ca988efded5e0e4bc2b087d8f27e315c36ca34e49d8faee9a4a2cb0a2a9" exitCode=0 Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.419803 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-aa81-account-create-update-hlh9d" event={"ID":"1058139e-d30c-419c-9819-91c5418144fa","Type":"ContainerDied","Data":"56e79ca988efded5e0e4bc2b087d8f27e315c36ca34e49d8faee9a4a2cb0a2a9"} Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.419836 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-aa81-account-create-update-hlh9d" event={"ID":"1058139e-d30c-419c-9819-91c5418144fa","Type":"ContainerStarted","Data":"2e6a1fcc9495bce4340047b0884599e572e026d10b83ed93b8c2058761bf4f3d"} Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.440239 4622 generic.go:334] "Generic (PLEG): container finished" podID="b537dfd7-4088-4292-bf23-aedf0e2bd0df" containerID="eb592874236e8fc40f5048a3226ede035fefc5bfddfcc7bbc05beaf76d4fe79b" exitCode=0 Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.440331 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6k9fh" event={"ID":"b537dfd7-4088-4292-bf23-aedf0e2bd0df","Type":"ContainerDied","Data":"eb592874236e8fc40f5048a3226ede035fefc5bfddfcc7bbc05beaf76d4fe79b"} Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.440363 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6k9fh" event={"ID":"b537dfd7-4088-4292-bf23-aedf0e2bd0df","Type":"ContainerStarted","Data":"82f7b29a53b5ba3c84820a45550975adfbdd071b82e47d6468b6ce8211d2eaac"} Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.446972 4622 generic.go:334] "Generic (PLEG): container finished" podID="2c071132-6163-40db-92b0-121c2d362a69" containerID="ade24a4ee019823fdb8252a10bd41beee53da372a730b7350233dbb6656f0749" exitCode=0 Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.447033 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-4d48-account-create-update-bmtqm" event={"ID":"2c071132-6163-40db-92b0-121c2d362a69","Type":"ContainerDied","Data":"ade24a4ee019823fdb8252a10bd41beee53da372a730b7350233dbb6656f0749"} Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.447058 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-4d48-account-create-update-bmtqm" 
event={"ID":"2c071132-6163-40db-92b0-121c2d362a69","Type":"ContainerStarted","Data":"d523d6ae2cbac49460a49c64a01de38d8b64f77f7d86eeb0d4129a47efe4e7ad"} Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.448095 4622 generic.go:334] "Generic (PLEG): container finished" podID="690c5cab-acaa-440f-803e-5c8ae6705486" containerID="c09a373ee7912a4866ad896ded228f0c616fc3dff7abd1b07e86ae9bd436ce67" exitCode=0 Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.448280 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-dlkff" event={"ID":"690c5cab-acaa-440f-803e-5c8ae6705486","Type":"ContainerDied","Data":"c09a373ee7912a4866ad896ded228f0c616fc3dff7abd1b07e86ae9bd436ce67"} Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.448305 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-dlkff" event={"ID":"690c5cab-acaa-440f-803e-5c8ae6705486","Type":"ContainerStarted","Data":"1e3cf3466f202d27c2faa7c38633645a513fc3a8c1451e792fac68db00ddf0b6"} Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.769930 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.807204 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-run-ovn\") pod \"7c52e23a-ed9f-440a-9500-b73d548e9088\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.807322 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c52e23a-ed9f-440a-9500-b73d548e9088-scripts\") pod \"7c52e23a-ed9f-440a-9500-b73d548e9088\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.807380 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bk4m\" (UniqueName: \"kubernetes.io/projected/7c52e23a-ed9f-440a-9500-b73d548e9088-kube-api-access-6bk4m\") pod \"7c52e23a-ed9f-440a-9500-b73d548e9088\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.807435 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-log-ovn\") pod \"7c52e23a-ed9f-440a-9500-b73d548e9088\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.807442 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "7c52e23a-ed9f-440a-9500-b73d548e9088" (UID: "7c52e23a-ed9f-440a-9500-b73d548e9088"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.807461 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7c52e23a-ed9f-440a-9500-b73d548e9088-additional-scripts\") pod \"7c52e23a-ed9f-440a-9500-b73d548e9088\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.807678 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-run\") pod \"7c52e23a-ed9f-440a-9500-b73d548e9088\" (UID: \"7c52e23a-ed9f-440a-9500-b73d548e9088\") " Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.807977 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c52e23a-ed9f-440a-9500-b73d548e9088-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "7c52e23a-ed9f-440a-9500-b73d548e9088" (UID: "7c52e23a-ed9f-440a-9500-b73d548e9088"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.808010 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "7c52e23a-ed9f-440a-9500-b73d548e9088" (UID: "7c52e23a-ed9f-440a-9500-b73d548e9088"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.808032 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-run" (OuterVolumeSpecName: "var-run") pod "7c52e23a-ed9f-440a-9500-b73d548e9088" (UID: "7c52e23a-ed9f-440a-9500-b73d548e9088"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.808188 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c52e23a-ed9f-440a-9500-b73d548e9088-scripts" (OuterVolumeSpecName: "scripts") pod "7c52e23a-ed9f-440a-9500-b73d548e9088" (UID: "7c52e23a-ed9f-440a-9500-b73d548e9088"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.808392 4622 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-run\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.808409 4622 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.808417 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c52e23a-ed9f-440a-9500-b73d548e9088-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.808426 4622 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7c52e23a-ed9f-440a-9500-b73d548e9088-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.808433 4622 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7c52e23a-ed9f-440a-9500-b73d548e9088-additional-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.812793 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c52e23a-ed9f-440a-9500-b73d548e9088-kube-api-access-6bk4m" (OuterVolumeSpecName: "kube-api-access-6bk4m") pod "7c52e23a-ed9f-440a-9500-b73d548e9088" (UID: "7c52e23a-ed9f-440a-9500-b73d548e9088"). InnerVolumeSpecName "kube-api-access-6bk4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.882826 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.909654 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6v57\" (UniqueName: \"kubernetes.io/projected/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-kube-api-access-z6v57\") pod \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.909703 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-config-data\") pod \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.909747 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-db-sync-config-data\") pod \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.909772 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-combined-ca-bundle\") pod \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\" (UID: \"8bbaef1d-2eba-4b11-a853-97b7af40b7e7\") " Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.910266 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bk4m\" (UniqueName: \"kubernetes.io/projected/7c52e23a-ed9f-440a-9500-b73d548e9088-kube-api-access-6bk4m\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.913551 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-kube-api-access-z6v57" (OuterVolumeSpecName: "kube-api-access-z6v57") pod "8bbaef1d-2eba-4b11-a853-97b7af40b7e7" (UID: "8bbaef1d-2eba-4b11-a853-97b7af40b7e7"). InnerVolumeSpecName "kube-api-access-z6v57". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.926330 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "8bbaef1d-2eba-4b11-a853-97b7af40b7e7" (UID: "8bbaef1d-2eba-4b11-a853-97b7af40b7e7"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.932234 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8bbaef1d-2eba-4b11-a853-97b7af40b7e7" (UID: "8bbaef1d-2eba-4b11-a853-97b7af40b7e7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:26:29 crc kubenswrapper[4622]: I1126 11:26:29.957245 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-config-data" (OuterVolumeSpecName: "config-data") pod "8bbaef1d-2eba-4b11-a853-97b7af40b7e7" (UID: "8bbaef1d-2eba-4b11-a853-97b7af40b7e7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.011397 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6v57\" (UniqueName: \"kubernetes.io/projected/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-kube-api-access-z6v57\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.011424 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.011436 4622 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.011444 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bbaef1d-2eba-4b11-a853-97b7af40b7e7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.458069 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8n2wh-config-k2n6k" event={"ID":"7c52e23a-ed9f-440a-9500-b73d548e9088","Type":"ContainerDied","Data":"bd5567e53b83715255f390de8d33c1046d2e95f2c9cc8e55201f438ef513fda3"} Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.458149 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd5567e53b83715255f390de8d33c1046d2e95f2c9cc8e55201f438ef513fda3" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.458084 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8n2wh-config-k2n6k" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.460352 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-hjhvx" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.469042 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hjhvx" event={"ID":"8bbaef1d-2eba-4b11-a853-97b7af40b7e7","Type":"ContainerDied","Data":"64c27f688d05f600f90505dc163d3913e50754cc8f4b8de3eb172300d3f67930"} Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.469153 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64c27f688d05f600f90505dc163d3913e50754cc8f4b8de3eb172300d3f67930" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.561607 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.563692 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.644948 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-569d458467-l59xr"] Nov 26 11:26:30 crc kubenswrapper[4622]: E1126 11:26:30.645681 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bbaef1d-2eba-4b11-a853-97b7af40b7e7" containerName="glance-db-sync" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.645696 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bbaef1d-2eba-4b11-a853-97b7af40b7e7" containerName="glance-db-sync" Nov 26 11:26:30 crc kubenswrapper[4622]: E1126 11:26:30.645708 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c52e23a-ed9f-440a-9500-b73d548e9088" containerName="ovn-config" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.645714 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c52e23a-ed9f-440a-9500-b73d548e9088" containerName="ovn-config" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.645979 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c52e23a-ed9f-440a-9500-b73d548e9088" containerName="ovn-config" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.646003 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bbaef1d-2eba-4b11-a853-97b7af40b7e7" containerName="glance-db-sync" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.655057 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.663051 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-569d458467-l59xr"] Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.728939 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-config\") pod \"dnsmasq-dns-569d458467-l59xr\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.729009 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnx9j\" (UniqueName: \"kubernetes.io/projected/bf31826d-1923-4efc-9726-063b74b7f6e0-kube-api-access-hnx9j\") pod \"dnsmasq-dns-569d458467-l59xr\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.729162 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-dns-svc\") pod \"dnsmasq-dns-569d458467-l59xr\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.731753 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-ovsdbserver-nb\") pod \"dnsmasq-dns-569d458467-l59xr\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.731785 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-ovsdbserver-sb\") pod \"dnsmasq-dns-569d458467-l59xr\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.825919 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-20d7-account-create-update-x2dsx" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.836758 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-config\") pod \"dnsmasq-dns-569d458467-l59xr\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.836831 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnx9j\" (UniqueName: \"kubernetes.io/projected/bf31826d-1923-4efc-9726-063b74b7f6e0-kube-api-access-hnx9j\") pod \"dnsmasq-dns-569d458467-l59xr\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.836929 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-dns-svc\") pod \"dnsmasq-dns-569d458467-l59xr\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.836994 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-ovsdbserver-nb\") pod \"dnsmasq-dns-569d458467-l59xr\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.837015 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-ovsdbserver-sb\") pod \"dnsmasq-dns-569d458467-l59xr\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.838023 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-ovsdbserver-sb\") pod \"dnsmasq-dns-569d458467-l59xr\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.838125 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-ovsdbserver-nb\") pod \"dnsmasq-dns-569d458467-l59xr\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.838159 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-dns-svc\") pod \"dnsmasq-dns-569d458467-l59xr\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.838380 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-config\") pod \"dnsmasq-dns-569d458467-l59xr\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 
11:26:30.859653 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnx9j\" (UniqueName: \"kubernetes.io/projected/bf31826d-1923-4efc-9726-063b74b7f6e0-kube-api-access-hnx9j\") pod \"dnsmasq-dns-569d458467-l59xr\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.863063 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-8n2wh-config-k2n6k"] Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.868476 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-8n2wh-config-k2n6k"] Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.937718 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a87d2790-eea1-4fb5-8f6c-fb4931e66b28-operator-scripts\") pod \"a87d2790-eea1-4fb5-8f6c-fb4931e66b28\" (UID: \"a87d2790-eea1-4fb5-8f6c-fb4931e66b28\") " Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.937965 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrpzh\" (UniqueName: \"kubernetes.io/projected/a87d2790-eea1-4fb5-8f6c-fb4931e66b28-kube-api-access-xrpzh\") pod \"a87d2790-eea1-4fb5-8f6c-fb4931e66b28\" (UID: \"a87d2790-eea1-4fb5-8f6c-fb4931e66b28\") " Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.938372 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a87d2790-eea1-4fb5-8f6c-fb4931e66b28-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a87d2790-eea1-4fb5-8f6c-fb4931e66b28" (UID: "a87d2790-eea1-4fb5-8f6c-fb4931e66b28"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.942169 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a87d2790-eea1-4fb5-8f6c-fb4931e66b28-kube-api-access-xrpzh" (OuterVolumeSpecName: "kube-api-access-xrpzh") pod "a87d2790-eea1-4fb5-8f6c-fb4931e66b28" (UID: "a87d2790-eea1-4fb5-8f6c-fb4931e66b28"). InnerVolumeSpecName "kube-api-access-xrpzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:30 crc kubenswrapper[4622]: I1126 11:26:30.979779 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.040146 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrpzh\" (UniqueName: \"kubernetes.io/projected/a87d2790-eea1-4fb5-8f6c-fb4931e66b28-kube-api-access-xrpzh\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.040448 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a87d2790-eea1-4fb5-8f6c-fb4931e66b28-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.119799 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6k9fh" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.133142 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-4dj9p" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.141229 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b537dfd7-4088-4292-bf23-aedf0e2bd0df-operator-scripts\") pod \"b537dfd7-4088-4292-bf23-aedf0e2bd0df\" (UID: \"b537dfd7-4088-4292-bf23-aedf0e2bd0df\") " Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.141283 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkwsm\" (UniqueName: \"kubernetes.io/projected/b537dfd7-4088-4292-bf23-aedf0e2bd0df-kube-api-access-bkwsm\") pod \"b537dfd7-4088-4292-bf23-aedf0e2bd0df\" (UID: \"b537dfd7-4088-4292-bf23-aedf0e2bd0df\") " Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.141803 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b537dfd7-4088-4292-bf23-aedf0e2bd0df-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b537dfd7-4088-4292-bf23-aedf0e2bd0df" (UID: "b537dfd7-4088-4292-bf23-aedf0e2bd0df"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.142359 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b537dfd7-4088-4292-bf23-aedf0e2bd0df-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.148694 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b537dfd7-4088-4292-bf23-aedf0e2bd0df-kube-api-access-bkwsm" (OuterVolumeSpecName: "kube-api-access-bkwsm") pod "b537dfd7-4088-4292-bf23-aedf0e2bd0df" (UID: "b537dfd7-4088-4292-bf23-aedf0e2bd0df"). InnerVolumeSpecName "kube-api-access-bkwsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.152951 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-dlkff" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.171722 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-aa81-account-create-update-hlh9d" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.173279 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-4d48-account-create-update-bmtqm" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.243243 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/690c5cab-acaa-440f-803e-5c8ae6705486-operator-scripts\") pod \"690c5cab-acaa-440f-803e-5c8ae6705486\" (UID: \"690c5cab-acaa-440f-803e-5c8ae6705486\") " Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.243281 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7ss7\" (UniqueName: \"kubernetes.io/projected/690c5cab-acaa-440f-803e-5c8ae6705486-kube-api-access-t7ss7\") pod \"690c5cab-acaa-440f-803e-5c8ae6705486\" (UID: \"690c5cab-acaa-440f-803e-5c8ae6705486\") " Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.243406 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c071132-6163-40db-92b0-121c2d362a69-operator-scripts\") pod \"2c071132-6163-40db-92b0-121c2d362a69\" (UID: \"2c071132-6163-40db-92b0-121c2d362a69\") " Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.243432 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zr5ps\" (UniqueName: \"kubernetes.io/projected/d731d848-aaea-425b-be07-5e852d58a45d-kube-api-access-zr5ps\") pod \"d731d848-aaea-425b-be07-5e852d58a45d\" (UID: \"d731d848-aaea-425b-be07-5e852d58a45d\") " Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.243475 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d731d848-aaea-425b-be07-5e852d58a45d-operator-scripts\") pod \"d731d848-aaea-425b-be07-5e852d58a45d\" (UID: \"d731d848-aaea-425b-be07-5e852d58a45d\") " Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.243545 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1058139e-d30c-419c-9819-91c5418144fa-operator-scripts\") pod \"1058139e-d30c-419c-9819-91c5418144fa\" (UID: \"1058139e-d30c-419c-9819-91c5418144fa\") " Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.243590 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-88q9h\" (UniqueName: \"kubernetes.io/projected/1058139e-d30c-419c-9819-91c5418144fa-kube-api-access-88q9h\") pod \"1058139e-d30c-419c-9819-91c5418144fa\" (UID: \"1058139e-d30c-419c-9819-91c5418144fa\") " Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.243612 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8pq6\" (UniqueName: \"kubernetes.io/projected/2c071132-6163-40db-92b0-121c2d362a69-kube-api-access-c8pq6\") pod \"2c071132-6163-40db-92b0-121c2d362a69\" (UID: \"2c071132-6163-40db-92b0-121c2d362a69\") " Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.244045 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkwsm\" (UniqueName: \"kubernetes.io/projected/b537dfd7-4088-4292-bf23-aedf0e2bd0df-kube-api-access-bkwsm\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.244462 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1058139e-d30c-419c-9819-91c5418144fa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod 
"1058139e-d30c-419c-9819-91c5418144fa" (UID: "1058139e-d30c-419c-9819-91c5418144fa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.245166 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/690c5cab-acaa-440f-803e-5c8ae6705486-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "690c5cab-acaa-440f-803e-5c8ae6705486" (UID: "690c5cab-acaa-440f-803e-5c8ae6705486"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.245239 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c071132-6163-40db-92b0-121c2d362a69-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2c071132-6163-40db-92b0-121c2d362a69" (UID: "2c071132-6163-40db-92b0-121c2d362a69"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.245316 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d731d848-aaea-425b-be07-5e852d58a45d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d731d848-aaea-425b-be07-5e852d58a45d" (UID: "d731d848-aaea-425b-be07-5e852d58a45d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.248997 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d731d848-aaea-425b-be07-5e852d58a45d-kube-api-access-zr5ps" (OuterVolumeSpecName: "kube-api-access-zr5ps") pod "d731d848-aaea-425b-be07-5e852d58a45d" (UID: "d731d848-aaea-425b-be07-5e852d58a45d"). InnerVolumeSpecName "kube-api-access-zr5ps". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.249410 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/690c5cab-acaa-440f-803e-5c8ae6705486-kube-api-access-t7ss7" (OuterVolumeSpecName: "kube-api-access-t7ss7") pod "690c5cab-acaa-440f-803e-5c8ae6705486" (UID: "690c5cab-acaa-440f-803e-5c8ae6705486"). InnerVolumeSpecName "kube-api-access-t7ss7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.252728 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1058139e-d30c-419c-9819-91c5418144fa-kube-api-access-88q9h" (OuterVolumeSpecName: "kube-api-access-88q9h") pod "1058139e-d30c-419c-9819-91c5418144fa" (UID: "1058139e-d30c-419c-9819-91c5418144fa"). InnerVolumeSpecName "kube-api-access-88q9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.257292 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c071132-6163-40db-92b0-121c2d362a69-kube-api-access-c8pq6" (OuterVolumeSpecName: "kube-api-access-c8pq6") pod "2c071132-6163-40db-92b0-121c2d362a69" (UID: "2c071132-6163-40db-92b0-121c2d362a69"). InnerVolumeSpecName "kube-api-access-c8pq6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.347997 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c071132-6163-40db-92b0-121c2d362a69-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.348298 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zr5ps\" (UniqueName: \"kubernetes.io/projected/d731d848-aaea-425b-be07-5e852d58a45d-kube-api-access-zr5ps\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.348312 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d731d848-aaea-425b-be07-5e852d58a45d-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.348322 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1058139e-d30c-419c-9819-91c5418144fa-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.348331 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-88q9h\" (UniqueName: \"kubernetes.io/projected/1058139e-d30c-419c-9819-91c5418144fa-kube-api-access-88q9h\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.348340 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8pq6\" (UniqueName: \"kubernetes.io/projected/2c071132-6163-40db-92b0-121c2d362a69-kube-api-access-c8pq6\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.348348 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/690c5cab-acaa-440f-803e-5c8ae6705486-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.348357 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7ss7\" (UniqueName: \"kubernetes.io/projected/690c5cab-acaa-440f-803e-5c8ae6705486-kube-api-access-t7ss7\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.469491 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-aa81-account-create-update-hlh9d" event={"ID":"1058139e-d30c-419c-9819-91c5418144fa","Type":"ContainerDied","Data":"2e6a1fcc9495bce4340047b0884599e572e026d10b83ed93b8c2058761bf4f3d"} Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.469559 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-aa81-account-create-update-hlh9d" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.469577 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e6a1fcc9495bce4340047b0884599e572e026d10b83ed93b8c2058761bf4f3d" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.471289 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6k9fh" event={"ID":"b537dfd7-4088-4292-bf23-aedf0e2bd0df","Type":"ContainerDied","Data":"82f7b29a53b5ba3c84820a45550975adfbdd071b82e47d6468b6ce8211d2eaac"} Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.471354 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82f7b29a53b5ba3c84820a45550975adfbdd071b82e47d6468b6ce8211d2eaac" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.471429 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6k9fh" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.473281 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-4d48-account-create-update-bmtqm" event={"ID":"2c071132-6163-40db-92b0-121c2d362a69","Type":"ContainerDied","Data":"d523d6ae2cbac49460a49c64a01de38d8b64f77f7d86eeb0d4129a47efe4e7ad"} Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.473309 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d523d6ae2cbac49460a49c64a01de38d8b64f77f7d86eeb0d4129a47efe4e7ad" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.473337 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-4d48-account-create-update-bmtqm" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.474633 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-dlkff" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.474679 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-dlkff" event={"ID":"690c5cab-acaa-440f-803e-5c8ae6705486","Type":"ContainerDied","Data":"1e3cf3466f202d27c2faa7c38633645a513fc3a8c1451e792fac68db00ddf0b6"} Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.474710 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e3cf3466f202d27c2faa7c38633645a513fc3a8c1451e792fac68db00ddf0b6" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.477678 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-20d7-account-create-update-x2dsx" event={"ID":"a87d2790-eea1-4fb5-8f6c-fb4931e66b28","Type":"ContainerDied","Data":"dc80cb4f7bb31575558936adf3c7c0d6a82ebd9d52ba1a3efa4ad858512bbc16"} Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.477703 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc80cb4f7bb31575558936adf3c7c0d6a82ebd9d52ba1a3efa4ad858512bbc16" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.477776 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-20d7-account-create-update-x2dsx" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.480357 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-4dj9p" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.480362 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-4dj9p" event={"ID":"d731d848-aaea-425b-be07-5e852d58a45d","Type":"ContainerDied","Data":"be43a6f665df05db735aca46b2566fcafb3bf4d8cb17769167de6111b74d216f"} Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.480397 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be43a6f665df05db735aca46b2566fcafb3bf4d8cb17769167de6111b74d216f" Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.534608 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-569d458467-l59xr"] Nov 26 11:26:31 crc kubenswrapper[4622]: I1126 11:26:31.612191 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-r44l7" podUID="4c339f88-a427-4951-ae98-3fe33469924b" containerName="registry-server" probeResult="failure" output=< Nov 26 11:26:31 crc kubenswrapper[4622]: timeout: failed to connect service ":50051" within 1s Nov 26 11:26:31 crc kubenswrapper[4622]: > Nov 26 11:26:32 crc kubenswrapper[4622]: I1126 11:26:32.716697 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c52e23a-ed9f-440a-9500-b73d548e9088" path="/var/lib/kubelet/pods/7c52e23a-ed9f-440a-9500-b73d548e9088/volumes" Nov 26 11:26:33 crc kubenswrapper[4622]: W1126 11:26:33.809367 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf31826d_1923_4efc_9726_063b74b7f6e0.slice/crio-7d77aac4fb5e37a18f96b16de4c61dce9b0e0109917625c85fd8874bb8e243d3 WatchSource:0}: Error finding container 7d77aac4fb5e37a18f96b16de4c61dce9b0e0109917625c85fd8874bb8e243d3: Status 404 returned error can't find the container with id 7d77aac4fb5e37a18f96b16de4c61dce9b0e0109917625c85fd8874bb8e243d3 Nov 26 11:26:34 crc kubenswrapper[4622]: I1126 11:26:34.500678 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mwptb" event={"ID":"d1451a45-9875-4a5e-b265-bd0c240cb628","Type":"ContainerStarted","Data":"e88e8776f1350ebc4e309ec3edf5a00865c3a95626d0ac0353f5d9a1ffe78c59"} Nov 26 11:26:34 crc kubenswrapper[4622]: I1126 11:26:34.502830 4622 generic.go:334] "Generic (PLEG): container finished" podID="bf31826d-1923-4efc-9726-063b74b7f6e0" containerID="ac51c346442936ba0ec05d1687d5983148e4fe603cc49a45445f2d44ff8a9d82" exitCode=0 Nov 26 11:26:34 crc kubenswrapper[4622]: I1126 11:26:34.502885 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-569d458467-l59xr" event={"ID":"bf31826d-1923-4efc-9726-063b74b7f6e0","Type":"ContainerDied","Data":"ac51c346442936ba0ec05d1687d5983148e4fe603cc49a45445f2d44ff8a9d82"} Nov 26 11:26:34 crc kubenswrapper[4622]: I1126 11:26:34.502916 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-569d458467-l59xr" event={"ID":"bf31826d-1923-4efc-9726-063b74b7f6e0","Type":"ContainerStarted","Data":"7d77aac4fb5e37a18f96b16de4c61dce9b0e0109917625c85fd8874bb8e243d3"} Nov 26 11:26:34 crc kubenswrapper[4622]: I1126 11:26:34.517097 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-mwptb" podStartSLOduration=2.424854691 podStartE2EDuration="7.517073789s" podCreationTimestamp="2025-11-26 11:26:27 +0000 UTC" firstStartedPulling="2025-11-26 11:26:28.745590549 +0000 UTC m=+948.336802070" 
lastFinishedPulling="2025-11-26 11:26:33.837809646 +0000 UTC m=+953.429021168" observedRunningTime="2025-11-26 11:26:34.516482403 +0000 UTC m=+954.107693925" watchObservedRunningTime="2025-11-26 11:26:34.517073789 +0000 UTC m=+954.108285311" Nov 26 11:26:35 crc kubenswrapper[4622]: I1126 11:26:35.514194 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-569d458467-l59xr" event={"ID":"bf31826d-1923-4efc-9726-063b74b7f6e0","Type":"ContainerStarted","Data":"a4872b6541b9290bf8f72dc4224fcf33c3ee82110235f64dda0526d30285c27f"} Nov 26 11:26:35 crc kubenswrapper[4622]: I1126 11:26:35.529072 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-569d458467-l59xr" podStartSLOduration=5.529040482 podStartE2EDuration="5.529040482s" podCreationTimestamp="2025-11-26 11:26:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:26:35.527755879 +0000 UTC m=+955.118967401" watchObservedRunningTime="2025-11-26 11:26:35.529040482 +0000 UTC m=+955.120252005" Nov 26 11:26:35 crc kubenswrapper[4622]: I1126 11:26:35.981514 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:36 crc kubenswrapper[4622]: I1126 11:26:36.532262 4622 generic.go:334] "Generic (PLEG): container finished" podID="d1451a45-9875-4a5e-b265-bd0c240cb628" containerID="e88e8776f1350ebc4e309ec3edf5a00865c3a95626d0ac0353f5d9a1ffe78c59" exitCode=0 Nov 26 11:26:36 crc kubenswrapper[4622]: I1126 11:26:36.532628 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mwptb" event={"ID":"d1451a45-9875-4a5e-b265-bd0c240cb628","Type":"ContainerDied","Data":"e88e8776f1350ebc4e309ec3edf5a00865c3a95626d0ac0353f5d9a1ffe78c59"} Nov 26 11:26:37 crc kubenswrapper[4622]: I1126 11:26:37.809956 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-mwptb" Nov 26 11:26:37 crc kubenswrapper[4622]: I1126 11:26:37.881167 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnb5h\" (UniqueName: \"kubernetes.io/projected/d1451a45-9875-4a5e-b265-bd0c240cb628-kube-api-access-lnb5h\") pod \"d1451a45-9875-4a5e-b265-bd0c240cb628\" (UID: \"d1451a45-9875-4a5e-b265-bd0c240cb628\") " Nov 26 11:26:37 crc kubenswrapper[4622]: I1126 11:26:37.881233 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1451a45-9875-4a5e-b265-bd0c240cb628-combined-ca-bundle\") pod \"d1451a45-9875-4a5e-b265-bd0c240cb628\" (UID: \"d1451a45-9875-4a5e-b265-bd0c240cb628\") " Nov 26 11:26:37 crc kubenswrapper[4622]: I1126 11:26:37.881348 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1451a45-9875-4a5e-b265-bd0c240cb628-config-data\") pod \"d1451a45-9875-4a5e-b265-bd0c240cb628\" (UID: \"d1451a45-9875-4a5e-b265-bd0c240cb628\") " Nov 26 11:26:37 crc kubenswrapper[4622]: I1126 11:26:37.886229 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1451a45-9875-4a5e-b265-bd0c240cb628-kube-api-access-lnb5h" (OuterVolumeSpecName: "kube-api-access-lnb5h") pod "d1451a45-9875-4a5e-b265-bd0c240cb628" (UID: "d1451a45-9875-4a5e-b265-bd0c240cb628"). InnerVolumeSpecName "kube-api-access-lnb5h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:37 crc kubenswrapper[4622]: I1126 11:26:37.900483 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1451a45-9875-4a5e-b265-bd0c240cb628-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d1451a45-9875-4a5e-b265-bd0c240cb628" (UID: "d1451a45-9875-4a5e-b265-bd0c240cb628"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:26:37 crc kubenswrapper[4622]: I1126 11:26:37.911799 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1451a45-9875-4a5e-b265-bd0c240cb628-config-data" (OuterVolumeSpecName: "config-data") pod "d1451a45-9875-4a5e-b265-bd0c240cb628" (UID: "d1451a45-9875-4a5e-b265-bd0c240cb628"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:26:37 crc kubenswrapper[4622]: I1126 11:26:37.983906 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1451a45-9875-4a5e-b265-bd0c240cb628-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:37 crc kubenswrapper[4622]: I1126 11:26:37.984083 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1451a45-9875-4a5e-b265-bd0c240cb628-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:37 crc kubenswrapper[4622]: I1126 11:26:37.984178 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnb5h\" (UniqueName: \"kubernetes.io/projected/d1451a45-9875-4a5e-b265-bd0c240cb628-kube-api-access-lnb5h\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.547981 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mwptb" event={"ID":"d1451a45-9875-4a5e-b265-bd0c240cb628","Type":"ContainerDied","Data":"9fa73c07a223360cec3c467f001d023169ea953461bac705bf0726fbc3a6dc94"} Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.548030 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9fa73c07a223360cec3c467f001d023169ea953461bac705bf0726fbc3a6dc94" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.548030 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-mwptb" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.792629 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-569d458467-l59xr"] Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.793018 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-569d458467-l59xr" podUID="bf31826d-1923-4efc-9726-063b74b7f6e0" containerName="dnsmasq-dns" containerID="cri-o://a4872b6541b9290bf8f72dc4224fcf33c3ee82110235f64dda0526d30285c27f" gracePeriod=10 Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.834189 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b76c757b7-2j8ct"] Nov 26 11:26:38 crc kubenswrapper[4622]: E1126 11:26:38.834545 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d731d848-aaea-425b-be07-5e852d58a45d" containerName="mariadb-database-create" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.834560 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="d731d848-aaea-425b-be07-5e852d58a45d" containerName="mariadb-database-create" Nov 26 11:26:38 crc kubenswrapper[4622]: E1126 11:26:38.834569 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a87d2790-eea1-4fb5-8f6c-fb4931e66b28" containerName="mariadb-account-create-update" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.834575 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a87d2790-eea1-4fb5-8f6c-fb4931e66b28" containerName="mariadb-account-create-update" Nov 26 11:26:38 crc kubenswrapper[4622]: E1126 11:26:38.834598 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c071132-6163-40db-92b0-121c2d362a69" containerName="mariadb-account-create-update" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.834605 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c071132-6163-40db-92b0-121c2d362a69" containerName="mariadb-account-create-update" Nov 26 11:26:38 crc kubenswrapper[4622]: E1126 11:26:38.834618 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b537dfd7-4088-4292-bf23-aedf0e2bd0df" containerName="mariadb-database-create" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.834624 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="b537dfd7-4088-4292-bf23-aedf0e2bd0df" containerName="mariadb-database-create" Nov 26 11:26:38 crc kubenswrapper[4622]: E1126 11:26:38.834633 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="690c5cab-acaa-440f-803e-5c8ae6705486" containerName="mariadb-database-create" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.834638 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="690c5cab-acaa-440f-803e-5c8ae6705486" containerName="mariadb-database-create" Nov 26 11:26:38 crc kubenswrapper[4622]: E1126 11:26:38.834646 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1058139e-d30c-419c-9819-91c5418144fa" containerName="mariadb-account-create-update" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.834652 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="1058139e-d30c-419c-9819-91c5418144fa" containerName="mariadb-account-create-update" Nov 26 11:26:38 crc kubenswrapper[4622]: E1126 11:26:38.834662 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1451a45-9875-4a5e-b265-bd0c240cb628" containerName="keystone-db-sync" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.834667 4622 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="d1451a45-9875-4a5e-b265-bd0c240cb628" containerName="keystone-db-sync" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.834822 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="b537dfd7-4088-4292-bf23-aedf0e2bd0df" containerName="mariadb-database-create" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.834835 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="1058139e-d30c-419c-9819-91c5418144fa" containerName="mariadb-account-create-update" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.834848 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="d731d848-aaea-425b-be07-5e852d58a45d" containerName="mariadb-database-create" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.834856 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1451a45-9875-4a5e-b265-bd0c240cb628" containerName="keystone-db-sync" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.834861 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c071132-6163-40db-92b0-121c2d362a69" containerName="mariadb-account-create-update" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.834867 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="a87d2790-eea1-4fb5-8f6c-fb4931e66b28" containerName="mariadb-account-create-update" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.834877 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="690c5cab-acaa-440f-803e-5c8ae6705486" containerName="mariadb-database-create" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.835667 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.843221 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b76c757b7-2j8ct"] Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.886548 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-fl6xb"] Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.887617 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.892781 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.892926 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.896561 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.904965 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gr2p8" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.906015 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-ovsdbserver-sb\") pod \"dnsmasq-dns-b76c757b7-2j8ct\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.906103 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52c2r\" (UniqueName: \"kubernetes.io/projected/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-kube-api-access-52c2r\") pod \"dnsmasq-dns-b76c757b7-2j8ct\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.906161 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-dns-svc\") pod \"dnsmasq-dns-b76c757b7-2j8ct\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.906239 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-ovsdbserver-nb\") pod \"dnsmasq-dns-b76c757b7-2j8ct\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.906258 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-config\") pod \"dnsmasq-dns-b76c757b7-2j8ct\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.906488 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 26 11:26:38 crc kubenswrapper[4622]: I1126 11:26:38.917443 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-fl6xb"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.008555 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-ovsdbserver-sb\") pod \"dnsmasq-dns-b76c757b7-2j8ct\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.008622 4622 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-fernet-keys\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.008654 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-scripts\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.008681 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52c2r\" (UniqueName: \"kubernetes.io/projected/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-kube-api-access-52c2r\") pod \"dnsmasq-dns-b76c757b7-2j8ct\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.008716 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-combined-ca-bundle\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.008742 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-dns-svc\") pod \"dnsmasq-dns-b76c757b7-2j8ct\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.008801 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-ovsdbserver-nb\") pod \"dnsmasq-dns-b76c757b7-2j8ct\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.008819 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-config\") pod \"dnsmasq-dns-b76c757b7-2j8ct\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.008842 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-config-data\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.008869 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-credential-keys\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.008898 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-g6hb7\" (UniqueName: \"kubernetes.io/projected/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-kube-api-access-g6hb7\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.009687 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-ovsdbserver-sb\") pod \"dnsmasq-dns-b76c757b7-2j8ct\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.010454 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-dns-svc\") pod \"dnsmasq-dns-b76c757b7-2j8ct\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.010983 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-ovsdbserver-nb\") pod \"dnsmasq-dns-b76c757b7-2j8ct\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.011265 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-config\") pod \"dnsmasq-dns-b76c757b7-2j8ct\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.068332 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52c2r\" (UniqueName: \"kubernetes.io/projected/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-kube-api-access-52c2r\") pod \"dnsmasq-dns-b76c757b7-2j8ct\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.068681 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-sq2s5"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.069711 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.088326 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.088666 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.108852 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-sq2s5"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.110407 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-config-data\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.110453 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-credential-keys\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.110493 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6hb7\" (UniqueName: \"kubernetes.io/projected/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-kube-api-access-g6hb7\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.110647 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-fernet-keys\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.110678 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-scripts\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.110721 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-combined-ca-bundle\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.125104 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-combined-ca-bundle\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.127211 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-fernet-keys\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 
26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.133695 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-credential-keys\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.134040 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-grv44" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.134484 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-config-data\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.134955 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-scripts\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.161892 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.202085 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6hb7\" (UniqueName: \"kubernetes.io/projected/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-kube-api-access-g6hb7\") pod \"keystone-bootstrap-fl6xb\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.215876 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-combined-ca-bundle\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.215916 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/57b0b68f-f25e-417e-ae0f-55d2361b8df6-etc-machine-id\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.215996 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-db-sync-config-data\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.216027 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-scripts\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.216150 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-config-data\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.216166 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsct9\" (UniqueName: \"kubernetes.io/projected/57b0b68f-f25e-417e-ae0f-55d2361b8df6-kube-api-access-vsct9\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.226402 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-kfrqh"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.227514 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kfrqh" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.230158 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.232894 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-th2pj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.232937 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-kfrqh"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.233054 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.260259 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.297975 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-868fc87bbf-49ntq"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.300185 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.309064 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.309291 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.310045 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-5tq87" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.310224 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.325799 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-combined-ca-bundle\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.332583 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/57b0b68f-f25e-417e-ae0f-55d2361b8df6-etc-machine-id\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.332719 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvqbw\" (UniqueName: \"kubernetes.io/projected/351c2fe8-f231-428a-b0bf-bc8642091b55-kube-api-access-dvqbw\") pod \"neutron-db-sync-kfrqh\" (UID: \"351c2fe8-f231-428a-b0bf-bc8642091b55\") " pod="openstack/neutron-db-sync-kfrqh" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.332870 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-db-sync-config-data\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.332965 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/351c2fe8-f231-428a-b0bf-bc8642091b55-config\") pod \"neutron-db-sync-kfrqh\" (UID: \"351c2fe8-f231-428a-b0bf-bc8642091b55\") " pod="openstack/neutron-db-sync-kfrqh" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.333069 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-scripts\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.333413 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/351c2fe8-f231-428a-b0bf-bc8642091b55-combined-ca-bundle\") pod \"neutron-db-sync-kfrqh\" (UID: \"351c2fe8-f231-428a-b0bf-bc8642091b55\") " pod="openstack/neutron-db-sync-kfrqh" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.333475 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-config-data\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.333537 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsct9\" (UniqueName: \"kubernetes.io/projected/57b0b68f-f25e-417e-ae0f-55d2361b8df6-kube-api-access-vsct9\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.334060 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/57b0b68f-f25e-417e-ae0f-55d2361b8df6-etc-machine-id\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.361890 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-db-sync-config-data\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.366472 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-scripts\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.370804 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsct9\" (UniqueName: \"kubernetes.io/projected/57b0b68f-f25e-417e-ae0f-55d2361b8df6-kube-api-access-vsct9\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.373465 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-config-data\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.380577 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-combined-ca-bundle\") pod \"cinder-db-sync-sq2s5\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.405915 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.412354 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.420899 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.421105 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.421569 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.426482 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-868fc87bbf-49ntq"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.437745 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-logs\") pod \"horizon-868fc87bbf-49ntq\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.437901 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-horizon-secret-key\") pod \"horizon-868fc87bbf-49ntq\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.437936 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-config-data\") pod \"horizon-868fc87bbf-49ntq\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.437973 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/351c2fe8-f231-428a-b0bf-bc8642091b55-combined-ca-bundle\") pod \"neutron-db-sync-kfrqh\" (UID: \"351c2fe8-f231-428a-b0bf-bc8642091b55\") " pod="openstack/neutron-db-sync-kfrqh" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.438048 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-scripts\") pod \"horizon-868fc87bbf-49ntq\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.438187 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvqbw\" (UniqueName: \"kubernetes.io/projected/351c2fe8-f231-428a-b0bf-bc8642091b55-kube-api-access-dvqbw\") pod \"neutron-db-sync-kfrqh\" (UID: \"351c2fe8-f231-428a-b0bf-bc8642091b55\") " pod="openstack/neutron-db-sync-kfrqh" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.438270 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq44h\" (UniqueName: \"kubernetes.io/projected/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-kube-api-access-bq44h\") pod \"horizon-868fc87bbf-49ntq\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.438312 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/351c2fe8-f231-428a-b0bf-bc8642091b55-config\") pod \"neutron-db-sync-kfrqh\" (UID: \"351c2fe8-f231-428a-b0bf-bc8642091b55\") " pod="openstack/neutron-db-sync-kfrqh" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.458927 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/351c2fe8-f231-428a-b0bf-bc8642091b55-combined-ca-bundle\") pod \"neutron-db-sync-kfrqh\" (UID: \"351c2fe8-f231-428a-b0bf-bc8642091b55\") " pod="openstack/neutron-db-sync-kfrqh" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.463452 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/351c2fe8-f231-428a-b0bf-bc8642091b55-config\") pod \"neutron-db-sync-kfrqh\" (UID: \"351c2fe8-f231-428a-b0bf-bc8642091b55\") " pod="openstack/neutron-db-sync-kfrqh" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.467449 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvqbw\" (UniqueName: \"kubernetes.io/projected/351c2fe8-f231-428a-b0bf-bc8642091b55-kube-api-access-dvqbw\") pod \"neutron-db-sync-kfrqh\" (UID: \"351c2fe8-f231-428a-b0bf-bc8642091b55\") " pod="openstack/neutron-db-sync-kfrqh" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.472922 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.485975 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b76c757b7-2j8ct"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.505323 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.523917 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-j8fgr"] Nov 26 11:26:39 crc kubenswrapper[4622]: E1126 11:26:39.524361 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf31826d-1923-4efc-9726-063b74b7f6e0" containerName="dnsmasq-dns" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.524378 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf31826d-1923-4efc-9726-063b74b7f6e0" containerName="dnsmasq-dns" Nov 26 11:26:39 crc kubenswrapper[4622]: E1126 11:26:39.524390 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf31826d-1923-4efc-9726-063b74b7f6e0" containerName="init" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.524396 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf31826d-1923-4efc-9726-063b74b7f6e0" containerName="init" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.524891 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf31826d-1923-4efc-9726-063b74b7f6e0" containerName="dnsmasq-dns" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.525553 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-j8fgr" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.529480 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-s9gjm" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.529785 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.529894 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-56c6d5c79c-x7zlj"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.531470 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.537091 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66f4bdbdb7-bq6zq"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.538694 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.539767 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-scripts\") pod \"horizon-868fc87bbf-49ntq\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.539802 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlvzs\" (UniqueName: \"kubernetes.io/projected/0d0cf662-0c27-42e7-842a-2479b180995f-kube-api-access-wlvzs\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.539824 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-scripts\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.539918 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-config-data\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.540070 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0d0cf662-0c27-42e7-842a-2479b180995f-run-httpd\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.540132 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bq44h\" (UniqueName: \"kubernetes.io/projected/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-kube-api-access-bq44h\") pod \"horizon-868fc87bbf-49ntq\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.540167 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.540281 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.540325 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-logs\") pod \"horizon-868fc87bbf-49ntq\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.540356 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0d0cf662-0c27-42e7-842a-2479b180995f-log-httpd\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.540446 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-horizon-secret-key\") pod \"horizon-868fc87bbf-49ntq\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.540487 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-config-data\") pod \"horizon-868fc87bbf-49ntq\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.540585 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-scripts\") pod \"horizon-868fc87bbf-49ntq\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.540827 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-logs\") pod \"horizon-868fc87bbf-49ntq\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.543072 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-config-data\") pod \"horizon-868fc87bbf-49ntq\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.543138 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-2mgt7"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.553441 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-56c6d5c79c-x7zlj"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.553723 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.562585 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.562906 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.563018 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-tpzsg" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.564430 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-horizon-secret-key\") pod \"horizon-868fc87bbf-49ntq\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.567458 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-j8fgr"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.570708 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq44h\" (UniqueName: \"kubernetes.io/projected/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-kube-api-access-bq44h\") pod \"horizon-868fc87bbf-49ntq\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.575131 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kfrqh" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.577167 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66f4bdbdb7-bq6zq"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.578457 4622 generic.go:334] "Generic (PLEG): container finished" podID="bf31826d-1923-4efc-9726-063b74b7f6e0" containerID="a4872b6541b9290bf8f72dc4224fcf33c3ee82110235f64dda0526d30285c27f" exitCode=0 Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.578511 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-569d458467-l59xr" event={"ID":"bf31826d-1923-4efc-9726-063b74b7f6e0","Type":"ContainerDied","Data":"a4872b6541b9290bf8f72dc4224fcf33c3ee82110235f64dda0526d30285c27f"} Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.578545 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-569d458467-l59xr" event={"ID":"bf31826d-1923-4efc-9726-063b74b7f6e0","Type":"ContainerDied","Data":"7d77aac4fb5e37a18f96b16de4c61dce9b0e0109917625c85fd8874bb8e243d3"} Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.578563 4622 scope.go:117] "RemoveContainer" containerID="a4872b6541b9290bf8f72dc4224fcf33c3ee82110235f64dda0526d30285c27f" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.578587 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-569d458467-l59xr" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.602532 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-2mgt7"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.641670 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-ovsdbserver-sb\") pod \"bf31826d-1923-4efc-9726-063b74b7f6e0\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.641793 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-dns-svc\") pod \"bf31826d-1923-4efc-9726-063b74b7f6e0\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.641884 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-ovsdbserver-nb\") pod \"bf31826d-1923-4efc-9726-063b74b7f6e0\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.641960 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnx9j\" (UniqueName: \"kubernetes.io/projected/bf31826d-1923-4efc-9726-063b74b7f6e0-kube-api-access-hnx9j\") pod \"bf31826d-1923-4efc-9726-063b74b7f6e0\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.641988 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-config\") pod \"bf31826d-1923-4efc-9726-063b74b7f6e0\" (UID: \"bf31826d-1923-4efc-9726-063b74b7f6e0\") " Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.642171 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r59dr\" (UniqueName: \"kubernetes.io/projected/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-kube-api-access-r59dr\") pod \"placement-db-sync-2mgt7\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.642213 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0d0cf662-0c27-42e7-842a-2479b180995f-log-httpd\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.642257 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/99de607d-316d-4435-b18a-c6eeb950da19-db-sync-config-data\") pod \"barbican-db-sync-j8fgr\" (UID: \"99de607d-316d-4435-b18a-c6eeb950da19\") " pod="openstack/barbican-db-sync-j8fgr" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.642284 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps9m9\" (UniqueName: \"kubernetes.io/projected/fae52a64-3bac-4806-9497-1752f4d31307-kube-api-access-ps9m9\") pod \"horizon-56c6d5c79c-x7zlj\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " 
pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.642313 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fae52a64-3bac-4806-9497-1752f4d31307-scripts\") pod \"horizon-56c6d5c79c-x7zlj\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.642331 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njlfs\" (UniqueName: \"kubernetes.io/projected/01c222b2-900b-401d-8ec2-fb9889927110-kube-api-access-njlfs\") pod \"dnsmasq-dns-66f4bdbdb7-bq6zq\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.642356 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-scripts\") pod \"placement-db-sync-2mgt7\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.642371 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-config-data\") pod \"placement-db-sync-2mgt7\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.642391 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-dns-svc\") pod \"dnsmasq-dns-66f4bdbdb7-bq6zq\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.642423 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlvzs\" (UniqueName: \"kubernetes.io/projected/0d0cf662-0c27-42e7-842a-2479b180995f-kube-api-access-wlvzs\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.642438 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-scripts\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.642488 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fae52a64-3bac-4806-9497-1752f4d31307-config-data\") pod \"horizon-56c6d5c79c-x7zlj\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.644163 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fae52a64-3bac-4806-9497-1752f4d31307-logs\") pod \"horizon-56c6d5c79c-x7zlj\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 
11:26:39.644416 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99de607d-316d-4435-b18a-c6eeb950da19-combined-ca-bundle\") pod \"barbican-db-sync-j8fgr\" (UID: \"99de607d-316d-4435-b18a-c6eeb950da19\") " pod="openstack/barbican-db-sync-j8fgr" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.644456 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-combined-ca-bundle\") pod \"placement-db-sync-2mgt7\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.644492 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-config-data\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.644639 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0d0cf662-0c27-42e7-842a-2479b180995f-run-httpd\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.644660 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fae52a64-3bac-4806-9497-1752f4d31307-horizon-secret-key\") pod \"horizon-56c6d5c79c-x7zlj\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.644687 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mdln\" (UniqueName: \"kubernetes.io/projected/99de607d-316d-4435-b18a-c6eeb950da19-kube-api-access-6mdln\") pod \"barbican-db-sync-j8fgr\" (UID: \"99de607d-316d-4435-b18a-c6eeb950da19\") " pod="openstack/barbican-db-sync-j8fgr" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.644714 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.644746 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-logs\") pod \"placement-db-sync-2mgt7\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.644810 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-config\") pod \"dnsmasq-dns-66f4bdbdb7-bq6zq\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.644848 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-ovsdbserver-sb\") pod \"dnsmasq-dns-66f4bdbdb7-bq6zq\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.644875 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-ovsdbserver-nb\") pod \"dnsmasq-dns-66f4bdbdb7-bq6zq\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.644933 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.645937 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0d0cf662-0c27-42e7-842a-2479b180995f-log-httpd\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.646106 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0d0cf662-0c27-42e7-842a-2479b180995f-run-httpd\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.649173 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf31826d-1923-4efc-9726-063b74b7f6e0-kube-api-access-hnx9j" (OuterVolumeSpecName: "kube-api-access-hnx9j") pod "bf31826d-1923-4efc-9726-063b74b7f6e0" (UID: "bf31826d-1923-4efc-9726-063b74b7f6e0"). InnerVolumeSpecName "kube-api-access-hnx9j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.650412 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.653001 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.653879 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-scripts\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.654651 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-config-data\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.657069 4622 scope.go:117] "RemoveContainer" containerID="ac51c346442936ba0ec05d1687d5983148e4fe603cc49a45445f2d44ff8a9d82" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.662044 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlvzs\" (UniqueName: \"kubernetes.io/projected/0d0cf662-0c27-42e7-842a-2479b180995f-kube-api-access-wlvzs\") pod \"ceilometer-0\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.686912 4622 scope.go:117] "RemoveContainer" containerID="a4872b6541b9290bf8f72dc4224fcf33c3ee82110235f64dda0526d30285c27f" Nov 26 11:26:39 crc kubenswrapper[4622]: E1126 11:26:39.688233 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4872b6541b9290bf8f72dc4224fcf33c3ee82110235f64dda0526d30285c27f\": container with ID starting with a4872b6541b9290bf8f72dc4224fcf33c3ee82110235f64dda0526d30285c27f not found: ID does not exist" containerID="a4872b6541b9290bf8f72dc4224fcf33c3ee82110235f64dda0526d30285c27f" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.688274 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4872b6541b9290bf8f72dc4224fcf33c3ee82110235f64dda0526d30285c27f"} err="failed to get container status \"a4872b6541b9290bf8f72dc4224fcf33c3ee82110235f64dda0526d30285c27f\": rpc error: code = NotFound desc = could not find container \"a4872b6541b9290bf8f72dc4224fcf33c3ee82110235f64dda0526d30285c27f\": container with ID starting with a4872b6541b9290bf8f72dc4224fcf33c3ee82110235f64dda0526d30285c27f not found: ID does not exist" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.688317 4622 scope.go:117] "RemoveContainer" containerID="ac51c346442936ba0ec05d1687d5983148e4fe603cc49a45445f2d44ff8a9d82" Nov 26 11:26:39 crc kubenswrapper[4622]: E1126 11:26:39.688763 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc 
= could not find container \"ac51c346442936ba0ec05d1687d5983148e4fe603cc49a45445f2d44ff8a9d82\": container with ID starting with ac51c346442936ba0ec05d1687d5983148e4fe603cc49a45445f2d44ff8a9d82 not found: ID does not exist" containerID="ac51c346442936ba0ec05d1687d5983148e4fe603cc49a45445f2d44ff8a9d82" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.688795 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac51c346442936ba0ec05d1687d5983148e4fe603cc49a45445f2d44ff8a9d82"} err="failed to get container status \"ac51c346442936ba0ec05d1687d5983148e4fe603cc49a45445f2d44ff8a9d82\": rpc error: code = NotFound desc = could not find container \"ac51c346442936ba0ec05d1687d5983148e4fe603cc49a45445f2d44ff8a9d82\": container with ID starting with ac51c346442936ba0ec05d1687d5983148e4fe603cc49a45445f2d44ff8a9d82 not found: ID does not exist" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.689075 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.696184 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bf31826d-1923-4efc-9726-063b74b7f6e0" (UID: "bf31826d-1923-4efc-9726-063b74b7f6e0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.696440 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-config" (OuterVolumeSpecName: "config") pod "bf31826d-1923-4efc-9726-063b74b7f6e0" (UID: "bf31826d-1923-4efc-9726-063b74b7f6e0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.697621 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bf31826d-1923-4efc-9726-063b74b7f6e0" (UID: "bf31826d-1923-4efc-9726-063b74b7f6e0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.700959 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bf31826d-1923-4efc-9726-063b74b7f6e0" (UID: "bf31826d-1923-4efc-9726-063b74b7f6e0"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.746849 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-logs\") pod \"placement-db-sync-2mgt7\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.746907 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-config\") pod \"dnsmasq-dns-66f4bdbdb7-bq6zq\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.746940 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-ovsdbserver-sb\") pod \"dnsmasq-dns-66f4bdbdb7-bq6zq\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.746966 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-ovsdbserver-nb\") pod \"dnsmasq-dns-66f4bdbdb7-bq6zq\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747026 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r59dr\" (UniqueName: \"kubernetes.io/projected/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-kube-api-access-r59dr\") pod \"placement-db-sync-2mgt7\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747088 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/99de607d-316d-4435-b18a-c6eeb950da19-db-sync-config-data\") pod \"barbican-db-sync-j8fgr\" (UID: \"99de607d-316d-4435-b18a-c6eeb950da19\") " pod="openstack/barbican-db-sync-j8fgr" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747113 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps9m9\" (UniqueName: \"kubernetes.io/projected/fae52a64-3bac-4806-9497-1752f4d31307-kube-api-access-ps9m9\") pod \"horizon-56c6d5c79c-x7zlj\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747149 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fae52a64-3bac-4806-9497-1752f4d31307-scripts\") pod \"horizon-56c6d5c79c-x7zlj\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747164 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njlfs\" (UniqueName: \"kubernetes.io/projected/01c222b2-900b-401d-8ec2-fb9889927110-kube-api-access-njlfs\") pod \"dnsmasq-dns-66f4bdbdb7-bq6zq\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 
11:26:39.747191 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-scripts\") pod \"placement-db-sync-2mgt7\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747204 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-config-data\") pod \"placement-db-sync-2mgt7\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747227 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-dns-svc\") pod \"dnsmasq-dns-66f4bdbdb7-bq6zq\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747338 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fae52a64-3bac-4806-9497-1752f4d31307-config-data\") pod \"horizon-56c6d5c79c-x7zlj\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747367 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fae52a64-3bac-4806-9497-1752f4d31307-logs\") pod \"horizon-56c6d5c79c-x7zlj\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747395 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99de607d-316d-4435-b18a-c6eeb950da19-combined-ca-bundle\") pod \"barbican-db-sync-j8fgr\" (UID: \"99de607d-316d-4435-b18a-c6eeb950da19\") " pod="openstack/barbican-db-sync-j8fgr" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747415 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-combined-ca-bundle\") pod \"placement-db-sync-2mgt7\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747433 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-logs\") pod \"placement-db-sync-2mgt7\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747447 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fae52a64-3bac-4806-9497-1752f4d31307-horizon-secret-key\") pod \"horizon-56c6d5c79c-x7zlj\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747545 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mdln\" (UniqueName: 
\"kubernetes.io/projected/99de607d-316d-4435-b18a-c6eeb950da19-kube-api-access-6mdln\") pod \"barbican-db-sync-j8fgr\" (UID: \"99de607d-316d-4435-b18a-c6eeb950da19\") " pod="openstack/barbican-db-sync-j8fgr" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747669 4622 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747682 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747736 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnx9j\" (UniqueName: \"kubernetes.io/projected/bf31826d-1923-4efc-9726-063b74b7f6e0-kube-api-access-hnx9j\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747746 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747756 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf31826d-1923-4efc-9726-063b74b7f6e0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747765 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-config\") pod \"dnsmasq-dns-66f4bdbdb7-bq6zq\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.747765 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-ovsdbserver-sb\") pod \"dnsmasq-dns-66f4bdbdb7-bq6zq\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.748688 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fae52a64-3bac-4806-9497-1752f4d31307-logs\") pod \"horizon-56c6d5c79c-x7zlj\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.748832 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fae52a64-3bac-4806-9497-1752f4d31307-config-data\") pod \"horizon-56c6d5c79c-x7zlj\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.748955 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fae52a64-3bac-4806-9497-1752f4d31307-scripts\") pod \"horizon-56c6d5c79c-x7zlj\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.749295 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-dns-svc\") pod \"dnsmasq-dns-66f4bdbdb7-bq6zq\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.750829 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/99de607d-316d-4435-b18a-c6eeb950da19-db-sync-config-data\") pod \"barbican-db-sync-j8fgr\" (UID: \"99de607d-316d-4435-b18a-c6eeb950da19\") " pod="openstack/barbican-db-sync-j8fgr" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.751584 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99de607d-316d-4435-b18a-c6eeb950da19-combined-ca-bundle\") pod \"barbican-db-sync-j8fgr\" (UID: \"99de607d-316d-4435-b18a-c6eeb950da19\") " pod="openstack/barbican-db-sync-j8fgr" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.752368 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-combined-ca-bundle\") pod \"placement-db-sync-2mgt7\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.752830 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.752836 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-scripts\") pod \"placement-db-sync-2mgt7\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.754887 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fae52a64-3bac-4806-9497-1752f4d31307-horizon-secret-key\") pod \"horizon-56c6d5c79c-x7zlj\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.757277 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-ovsdbserver-nb\") pod \"dnsmasq-dns-66f4bdbdb7-bq6zq\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.760013 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-config-data\") pod \"placement-db-sync-2mgt7\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.763895 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r59dr\" (UniqueName: \"kubernetes.io/projected/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-kube-api-access-r59dr\") pod \"placement-db-sync-2mgt7\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.764416 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps9m9\" (UniqueName: 
\"kubernetes.io/projected/fae52a64-3bac-4806-9497-1752f4d31307-kube-api-access-ps9m9\") pod \"horizon-56c6d5c79c-x7zlj\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.766218 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mdln\" (UniqueName: \"kubernetes.io/projected/99de607d-316d-4435-b18a-c6eeb950da19-kube-api-access-6mdln\") pod \"barbican-db-sync-j8fgr\" (UID: \"99de607d-316d-4435-b18a-c6eeb950da19\") " pod="openstack/barbican-db-sync-j8fgr" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.770577 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njlfs\" (UniqueName: \"kubernetes.io/projected/01c222b2-900b-401d-8ec2-fb9889927110-kube-api-access-njlfs\") pod \"dnsmasq-dns-66f4bdbdb7-bq6zq\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.786388 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b76c757b7-2j8ct"] Nov 26 11:26:39 crc kubenswrapper[4622]: W1126 11:26:39.788632 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podccd07a3e_037a_4f1c_a911_dde05e2dab1c.slice/crio-2159192a7583af004fc11a06c1981e1349666aff6a9278819d70cd26d42cd816 WatchSource:0}: Error finding container 2159192a7583af004fc11a06c1981e1349666aff6a9278819d70cd26d42cd816: Status 404 returned error can't find the container with id 2159192a7583af004fc11a06c1981e1349666aff6a9278819d70cd26d42cd816 Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.869095 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-j8fgr" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.885577 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.889469 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-fl6xb"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.902870 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.914074 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-569d458467-l59xr"] Nov 26 11:26:39 crc kubenswrapper[4622]: W1126 11:26:39.914058 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5b78849_9d6d_4849_b71d_a1ed2a94d78e.slice/crio-13c1dccb2152c9e40fc30a9a99e3f82e5235bd9b3013740a9928eb6d4f80eb2f WatchSource:0}: Error finding container 13c1dccb2152c9e40fc30a9a99e3f82e5235bd9b3013740a9928eb6d4f80eb2f: Status 404 returned error can't find the container with id 13c1dccb2152c9e40fc30a9a99e3f82e5235bd9b3013740a9928eb6d4f80eb2f Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.923483 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-569d458467-l59xr"] Nov 26 11:26:39 crc kubenswrapper[4622]: I1126 11:26:39.927570 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-2mgt7" Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.018939 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-sq2s5"] Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.142025 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-kfrqh"] Nov 26 11:26:40 crc kubenswrapper[4622]: W1126 11:26:40.162556 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod351c2fe8_f231_428a_b0bf_bc8642091b55.slice/crio-d6713a290ba6a91000f286cf02b8700116a08747b0653a17c189eb88a641d105 WatchSource:0}: Error finding container d6713a290ba6a91000f286cf02b8700116a08747b0653a17c189eb88a641d105: Status 404 returned error can't find the container with id d6713a290ba6a91000f286cf02b8700116a08747b0653a17c189eb88a641d105 Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.242302 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-868fc87bbf-49ntq"] Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.289617 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.373704 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-j8fgr"] Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.475059 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66f4bdbdb7-bq6zq"] Nov 26 11:26:40 crc kubenswrapper[4622]: W1126 11:26:40.535318 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfae52a64_3bac_4806_9497_1752f4d31307.slice/crio-e454790ad2d727f17cfeaa7943d1a765426828908cde0e847b3f2438ce7d6e02 WatchSource:0}: Error finding container e454790ad2d727f17cfeaa7943d1a765426828908cde0e847b3f2438ce7d6e02: Status 404 returned error can't find the container with id e454790ad2d727f17cfeaa7943d1a765426828908cde0e847b3f2438ce7d6e02 Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.549063 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-56c6d5c79c-x7zlj"] Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.554291 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-2mgt7"] Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.586270 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0d0cf662-0c27-42e7-842a-2479b180995f","Type":"ContainerStarted","Data":"eec5e58d1022ab50d72f5b49babb308768da57ad007939eab3dd7499231efe07"} Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.587677 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-2mgt7" event={"ID":"6bd2cbb6-f985-402e-845c-ea4f8e2f970e","Type":"ContainerStarted","Data":"59798216ac77779bde4f61752218b7575ad51a29464a54e88d906b82519641d6"} Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.588970 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sq2s5" event={"ID":"57b0b68f-f25e-417e-ae0f-55d2361b8df6","Type":"ContainerStarted","Data":"72a08c3ebfbd0a9e6916694b857a013908a2ee3734415f41f42fe4096374db4a"} Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.591068 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" 
event={"ID":"01c222b2-900b-401d-8ec2-fb9889927110","Type":"ContainerStarted","Data":"e6c7d898c775ccf85d609340977127318c879e6ee17cf5fc8d1f95ec2925d6ac"} Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.591902 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-j8fgr" event={"ID":"99de607d-316d-4435-b18a-c6eeb950da19","Type":"ContainerStarted","Data":"ba9f1c063a9a636f1389edfe16a5dd4506a2cc8c7d8afb8b711c81b48860e764"} Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.594810 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fl6xb" event={"ID":"a5b78849-9d6d-4849-b71d-a1ed2a94d78e","Type":"ContainerStarted","Data":"1f665d889e17bcc3173a9036e22e23d9ef982403ad36ef20072751070076b0ef"} Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.594844 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fl6xb" event={"ID":"a5b78849-9d6d-4849-b71d-a1ed2a94d78e","Type":"ContainerStarted","Data":"13c1dccb2152c9e40fc30a9a99e3f82e5235bd9b3013740a9928eb6d4f80eb2f"} Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.599386 4622 generic.go:334] "Generic (PLEG): container finished" podID="ccd07a3e-037a-4f1c-a911-dde05e2dab1c" containerID="ce8a9d21300c7e4b1f7442bbb20458aad74acde4d40cad65ab0082867160c11d" exitCode=0 Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.599593 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" event={"ID":"ccd07a3e-037a-4f1c-a911-dde05e2dab1c","Type":"ContainerDied","Data":"ce8a9d21300c7e4b1f7442bbb20458aad74acde4d40cad65ab0082867160c11d"} Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.599719 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.599745 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" event={"ID":"ccd07a3e-037a-4f1c-a911-dde05e2dab1c","Type":"ContainerStarted","Data":"2159192a7583af004fc11a06c1981e1349666aff6a9278819d70cd26d42cd816"} Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.601936 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56c6d5c79c-x7zlj" event={"ID":"fae52a64-3bac-4806-9497-1752f4d31307","Type":"ContainerStarted","Data":"e454790ad2d727f17cfeaa7943d1a765426828908cde0e847b3f2438ce7d6e02"} Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.602988 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-868fc87bbf-49ntq" event={"ID":"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0","Type":"ContainerStarted","Data":"cf0a8f2725737b32302119bfbefce4fcc0252abd138d91875c6611b2f6df4fa6"} Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.604172 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kfrqh" event={"ID":"351c2fe8-f231-428a-b0bf-bc8642091b55","Type":"ContainerStarted","Data":"59ddba8aff1d87dc060000a6ce208a22cd799cc69031d90bdba68314491abd62"} Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.604200 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kfrqh" event={"ID":"351c2fe8-f231-428a-b0bf-bc8642091b55","Type":"ContainerStarted","Data":"d6713a290ba6a91000f286cf02b8700116a08747b0653a17c189eb88a641d105"} Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.616954 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/keystone-bootstrap-fl6xb" podStartSLOduration=2.616943075 podStartE2EDuration="2.616943075s" podCreationTimestamp="2025-11-26 11:26:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:26:40.614615784 +0000 UTC m=+960.205827316" watchObservedRunningTime="2025-11-26 11:26:40.616943075 +0000 UTC m=+960.208154597" Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.643744 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.669017 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-kfrqh" podStartSLOduration=1.668999177 podStartE2EDuration="1.668999177s" podCreationTimestamp="2025-11-26 11:26:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:26:40.666272241 +0000 UTC m=+960.257483763" watchObservedRunningTime="2025-11-26 11:26:40.668999177 +0000 UTC m=+960.260210698" Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.736606 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf31826d-1923-4efc-9726-063b74b7f6e0" path="/var/lib/kubelet/pods/bf31826d-1923-4efc-9726-063b74b7f6e0/volumes" Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.857163 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r44l7"] Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.859556 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.985206 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52c2r\" (UniqueName: \"kubernetes.io/projected/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-kube-api-access-52c2r\") pod \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.985580 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-dns-svc\") pod \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.985615 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-ovsdbserver-nb\") pod \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.985686 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-config\") pod \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " Nov 26 11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.985795 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-ovsdbserver-sb\") pod \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\" (UID: \"ccd07a3e-037a-4f1c-a911-dde05e2dab1c\") " Nov 26 
11:26:40 crc kubenswrapper[4622]: I1126 11:26:40.992417 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-kube-api-access-52c2r" (OuterVolumeSpecName: "kube-api-access-52c2r") pod "ccd07a3e-037a-4f1c-a911-dde05e2dab1c" (UID: "ccd07a3e-037a-4f1c-a911-dde05e2dab1c"). InnerVolumeSpecName "kube-api-access-52c2r". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.051074 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ccd07a3e-037a-4f1c-a911-dde05e2dab1c" (UID: "ccd07a3e-037a-4f1c-a911-dde05e2dab1c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.059960 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ccd07a3e-037a-4f1c-a911-dde05e2dab1c" (UID: "ccd07a3e-037a-4f1c-a911-dde05e2dab1c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.064090 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ccd07a3e-037a-4f1c-a911-dde05e2dab1c" (UID: "ccd07a3e-037a-4f1c-a911-dde05e2dab1c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.084078 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-config" (OuterVolumeSpecName: "config") pod "ccd07a3e-037a-4f1c-a911-dde05e2dab1c" (UID: "ccd07a3e-037a-4f1c-a911-dde05e2dab1c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.095388 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52c2r\" (UniqueName: \"kubernetes.io/projected/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-kube-api-access-52c2r\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.095488 4622 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.095590 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.095659 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.095717 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ccd07a3e-037a-4f1c-a911-dde05e2dab1c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.159058 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.174474 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-56c6d5c79c-x7zlj"] Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.216626 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-76c94b49cc-l575g"] Nov 26 11:26:41 crc kubenswrapper[4622]: E1126 11:26:41.217993 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccd07a3e-037a-4f1c-a911-dde05e2dab1c" containerName="init" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.218015 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccd07a3e-037a-4f1c-a911-dde05e2dab1c" containerName="init" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.218194 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccd07a3e-037a-4f1c-a911-dde05e2dab1c" containerName="init" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.219033 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.250930 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-76c94b49cc-l575g"] Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.301806 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-scripts\") pod \"horizon-76c94b49cc-l575g\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.302002 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-horizon-secret-key\") pod \"horizon-76c94b49cc-l575g\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.302052 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tpqm\" (UniqueName: \"kubernetes.io/projected/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-kube-api-access-8tpqm\") pod \"horizon-76c94b49cc-l575g\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.302297 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-config-data\") pod \"horizon-76c94b49cc-l575g\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.302331 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-logs\") pod \"horizon-76c94b49cc-l575g\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.406599 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tpqm\" (UniqueName: \"kubernetes.io/projected/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-kube-api-access-8tpqm\") pod \"horizon-76c94b49cc-l575g\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.406747 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-config-data\") pod \"horizon-76c94b49cc-l575g\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.406776 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-logs\") pod \"horizon-76c94b49cc-l575g\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.406892 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-scripts\") pod \"horizon-76c94b49cc-l575g\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.406975 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-horizon-secret-key\") pod \"horizon-76c94b49cc-l575g\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.409948 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-logs\") pod \"horizon-76c94b49cc-l575g\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.410596 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-scripts\") pod \"horizon-76c94b49cc-l575g\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.410919 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-horizon-secret-key\") pod \"horizon-76c94b49cc-l575g\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.412103 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-config-data\") pod \"horizon-76c94b49cc-l575g\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.434035 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tpqm\" (UniqueName: \"kubernetes.io/projected/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-kube-api-access-8tpqm\") pod \"horizon-76c94b49cc-l575g\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.536540 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.637581 4622 generic.go:334] "Generic (PLEG): container finished" podID="01c222b2-900b-401d-8ec2-fb9889927110" containerID="e090d2c5bf319a1f1f1df8240949bf5385ad13099a7ab5887f396cd98cf8416c" exitCode=0 Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.637908 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" event={"ID":"01c222b2-900b-401d-8ec2-fb9889927110","Type":"ContainerDied","Data":"e090d2c5bf319a1f1f1df8240949bf5385ad13099a7ab5887f396cd98cf8416c"} Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.641339 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" event={"ID":"ccd07a3e-037a-4f1c-a911-dde05e2dab1c","Type":"ContainerDied","Data":"2159192a7583af004fc11a06c1981e1349666aff6a9278819d70cd26d42cd816"} Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.641408 4622 scope.go:117] "RemoveContainer" containerID="ce8a9d21300c7e4b1f7442bbb20458aad74acde4d40cad65ab0082867160c11d" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.641485 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b76c757b7-2j8ct" Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.756846 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b76c757b7-2j8ct"] Nov 26 11:26:41 crc kubenswrapper[4622]: I1126 11:26:41.773406 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b76c757b7-2j8ct"] Nov 26 11:26:42 crc kubenswrapper[4622]: I1126 11:26:42.117342 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-76c94b49cc-l575g"] Nov 26 11:26:42 crc kubenswrapper[4622]: W1126 11:26:42.151319 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ec3ddcf_99b7_4529_ab0a_ac52a138edf2.slice/crio-b6ab66a642764da6543f23bf2302c003a077787c5a9967acb56a34c55b690d63 WatchSource:0}: Error finding container b6ab66a642764da6543f23bf2302c003a077787c5a9967acb56a34c55b690d63: Status 404 returned error can't find the container with id b6ab66a642764da6543f23bf2302c003a077787c5a9967acb56a34c55b690d63 Nov 26 11:26:42 crc kubenswrapper[4622]: I1126 11:26:42.668814 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" event={"ID":"01c222b2-900b-401d-8ec2-fb9889927110","Type":"ContainerStarted","Data":"7f4073bd5445cfe12e2cdfaa00835c8cb1ddf71fd0c3f9f861ad26f0cdbecf47"} Nov 26 11:26:42 crc kubenswrapper[4622]: I1126 11:26:42.669579 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:42 crc kubenswrapper[4622]: I1126 11:26:42.673168 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76c94b49cc-l575g" event={"ID":"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2","Type":"ContainerStarted","Data":"b6ab66a642764da6543f23bf2302c003a077787c5a9967acb56a34c55b690d63"} Nov 26 11:26:42 crc kubenswrapper[4622]: I1126 11:26:42.673261 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-r44l7" podUID="4c339f88-a427-4951-ae98-3fe33469924b" containerName="registry-server" containerID="cri-o://173617202ec041b2a2e516a32e3e6d30336845b726e0f3586e26d933c2a299bd" gracePeriod=2 Nov 26 11:26:42 crc kubenswrapper[4622]: 
I1126 11:26:42.691840 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" podStartSLOduration=3.691821068 podStartE2EDuration="3.691821068s" podCreationTimestamp="2025-11-26 11:26:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:26:42.684546833 +0000 UTC m=+962.275758365" watchObservedRunningTime="2025-11-26 11:26:42.691821068 +0000 UTC m=+962.283032590" Nov 26 11:26:42 crc kubenswrapper[4622]: I1126 11:26:42.735532 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccd07a3e-037a-4f1c-a911-dde05e2dab1c" path="/var/lib/kubelet/pods/ccd07a3e-037a-4f1c-a911-dde05e2dab1c/volumes" Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.241532 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.418213 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqpmx\" (UniqueName: \"kubernetes.io/projected/4c339f88-a427-4951-ae98-3fe33469924b-kube-api-access-nqpmx\") pod \"4c339f88-a427-4951-ae98-3fe33469924b\" (UID: \"4c339f88-a427-4951-ae98-3fe33469924b\") " Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.418398 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c339f88-a427-4951-ae98-3fe33469924b-utilities\") pod \"4c339f88-a427-4951-ae98-3fe33469924b\" (UID: \"4c339f88-a427-4951-ae98-3fe33469924b\") " Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.418440 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c339f88-a427-4951-ae98-3fe33469924b-catalog-content\") pod \"4c339f88-a427-4951-ae98-3fe33469924b\" (UID: \"4c339f88-a427-4951-ae98-3fe33469924b\") " Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.419270 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c339f88-a427-4951-ae98-3fe33469924b-utilities" (OuterVolumeSpecName: "utilities") pod "4c339f88-a427-4951-ae98-3fe33469924b" (UID: "4c339f88-a427-4951-ae98-3fe33469924b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.424664 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c339f88-a427-4951-ae98-3fe33469924b-kube-api-access-nqpmx" (OuterVolumeSpecName: "kube-api-access-nqpmx") pod "4c339f88-a427-4951-ae98-3fe33469924b" (UID: "4c339f88-a427-4951-ae98-3fe33469924b"). InnerVolumeSpecName "kube-api-access-nqpmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.491710 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c339f88-a427-4951-ae98-3fe33469924b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4c339f88-a427-4951-ae98-3fe33469924b" (UID: "4c339f88-a427-4951-ae98-3fe33469924b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.520991 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqpmx\" (UniqueName: \"kubernetes.io/projected/4c339f88-a427-4951-ae98-3fe33469924b-kube-api-access-nqpmx\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.521243 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c339f88-a427-4951-ae98-3fe33469924b-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.521320 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c339f88-a427-4951-ae98-3fe33469924b-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.691544 4622 generic.go:334] "Generic (PLEG): container finished" podID="4c339f88-a427-4951-ae98-3fe33469924b" containerID="173617202ec041b2a2e516a32e3e6d30336845b726e0f3586e26d933c2a299bd" exitCode=0 Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.691610 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r44l7" event={"ID":"4c339f88-a427-4951-ae98-3fe33469924b","Type":"ContainerDied","Data":"173617202ec041b2a2e516a32e3e6d30336845b726e0f3586e26d933c2a299bd"} Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.691642 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r44l7" event={"ID":"4c339f88-a427-4951-ae98-3fe33469924b","Type":"ContainerDied","Data":"b5130630e26ae4bbe092b44443632d79aa67649eedc9ab8b92c5b7aaf89c66de"} Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.691659 4622 scope.go:117] "RemoveContainer" containerID="173617202ec041b2a2e516a32e3e6d30336845b726e0f3586e26d933c2a299bd" Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.691785 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r44l7" Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.694646 4622 generic.go:334] "Generic (PLEG): container finished" podID="a5b78849-9d6d-4849-b71d-a1ed2a94d78e" containerID="1f665d889e17bcc3173a9036e22e23d9ef982403ad36ef20072751070076b0ef" exitCode=0 Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.695599 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fl6xb" event={"ID":"a5b78849-9d6d-4849-b71d-a1ed2a94d78e","Type":"ContainerDied","Data":"1f665d889e17bcc3173a9036e22e23d9ef982403ad36ef20072751070076b0ef"} Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.739092 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r44l7"] Nov 26 11:26:43 crc kubenswrapper[4622]: I1126 11:26:43.745468 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-r44l7"] Nov 26 11:26:44 crc kubenswrapper[4622]: I1126 11:26:44.715843 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c339f88-a427-4951-ae98-3fe33469924b" path="/var/lib/kubelet/pods/4c339f88-a427-4951-ae98-3fe33469924b/volumes" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.671561 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-868fc87bbf-49ntq"] Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.709024 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-686b8b87c4-gqg7z"] Nov 26 11:26:47 crc kubenswrapper[4622]: E1126 11:26:47.709401 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c339f88-a427-4951-ae98-3fe33469924b" containerName="extract-content" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.709422 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c339f88-a427-4951-ae98-3fe33469924b" containerName="extract-content" Nov 26 11:26:47 crc kubenswrapper[4622]: E1126 11:26:47.709452 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c339f88-a427-4951-ae98-3fe33469924b" containerName="extract-utilities" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.709457 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c339f88-a427-4951-ae98-3fe33469924b" containerName="extract-utilities" Nov 26 11:26:47 crc kubenswrapper[4622]: E1126 11:26:47.709480 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c339f88-a427-4951-ae98-3fe33469924b" containerName="registry-server" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.709485 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c339f88-a427-4951-ae98-3fe33469924b" containerName="registry-server" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.709692 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c339f88-a427-4951-ae98-3fe33469924b" containerName="registry-server" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.710612 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.712780 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.747062 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-686b8b87c4-gqg7z"] Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.765258 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-76c94b49cc-l575g"] Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.790168 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-bc6fbfc58-jv7cz"] Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.791687 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.806630 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-bc6fbfc58-jv7cz"] Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.824282 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2285a60b-0202-4abd-91de-7241e109804f-logs\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.824404 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2285a60b-0202-4abd-91de-7241e109804f-horizon-tls-certs\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.824425 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-horizon-secret-key\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.824464 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2285a60b-0202-4abd-91de-7241e109804f-horizon-secret-key\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.824564 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85b663a5-e13e-4653-a4bb-340952a968c9-scripts\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.824586 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-horizon-tls-certs\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.824619 4622 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85b663a5-e13e-4653-a4bb-340952a968c9-config-data\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.824671 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2285a60b-0202-4abd-91de-7241e109804f-combined-ca-bundle\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.824692 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2285a60b-0202-4abd-91de-7241e109804f-scripts\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.824757 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pjb2\" (UniqueName: \"kubernetes.io/projected/85b663a5-e13e-4653-a4bb-340952a968c9-kube-api-access-4pjb2\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.824796 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85b663a5-e13e-4653-a4bb-340952a968c9-logs\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.824814 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-combined-ca-bundle\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.826061 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5g7g\" (UniqueName: \"kubernetes.io/projected/2285a60b-0202-4abd-91de-7241e109804f-kube-api-access-k5g7g\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.826114 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2285a60b-0202-4abd-91de-7241e109804f-config-data\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.927743 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2285a60b-0202-4abd-91de-7241e109804f-horizon-tls-certs\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.927790 4622 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-horizon-secret-key\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.927817 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2285a60b-0202-4abd-91de-7241e109804f-horizon-secret-key\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.927851 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85b663a5-e13e-4653-a4bb-340952a968c9-scripts\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.927873 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-horizon-tls-certs\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.927894 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85b663a5-e13e-4653-a4bb-340952a968c9-config-data\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.927917 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2285a60b-0202-4abd-91de-7241e109804f-combined-ca-bundle\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.927936 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2285a60b-0202-4abd-91de-7241e109804f-scripts\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.927963 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pjb2\" (UniqueName: \"kubernetes.io/projected/85b663a5-e13e-4653-a4bb-340952a968c9-kube-api-access-4pjb2\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.927982 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85b663a5-e13e-4653-a4bb-340952a968c9-logs\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.927999 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-combined-ca-bundle\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.928030 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5g7g\" (UniqueName: \"kubernetes.io/projected/2285a60b-0202-4abd-91de-7241e109804f-kube-api-access-k5g7g\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.928048 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2285a60b-0202-4abd-91de-7241e109804f-config-data\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.928100 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2285a60b-0202-4abd-91de-7241e109804f-logs\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.928631 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2285a60b-0202-4abd-91de-7241e109804f-logs\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.929044 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85b663a5-e13e-4653-a4bb-340952a968c9-logs\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.929760 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2285a60b-0202-4abd-91de-7241e109804f-scripts\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.930110 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2285a60b-0202-4abd-91de-7241e109804f-config-data\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.932844 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85b663a5-e13e-4653-a4bb-340952a968c9-scripts\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.934363 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-horizon-secret-key\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 
11:26:47.934898 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2285a60b-0202-4abd-91de-7241e109804f-combined-ca-bundle\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.935342 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2285a60b-0202-4abd-91de-7241e109804f-horizon-tls-certs\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.935915 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-combined-ca-bundle\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.939565 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85b663a5-e13e-4653-a4bb-340952a968c9-config-data\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.941966 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-horizon-tls-certs\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.945721 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pjb2\" (UniqueName: \"kubernetes.io/projected/85b663a5-e13e-4653-a4bb-340952a968c9-kube-api-access-4pjb2\") pod \"horizon-686b8b87c4-gqg7z\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.946267 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5g7g\" (UniqueName: \"kubernetes.io/projected/2285a60b-0202-4abd-91de-7241e109804f-kube-api-access-k5g7g\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:47 crc kubenswrapper[4622]: I1126 11:26:47.950788 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2285a60b-0202-4abd-91de-7241e109804f-horizon-secret-key\") pod \"horizon-bc6fbfc58-jv7cz\" (UID: \"2285a60b-0202-4abd-91de-7241e109804f\") " pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:48 crc kubenswrapper[4622]: I1126 11:26:48.029244 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:26:48 crc kubenswrapper[4622]: I1126 11:26:48.126118 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:26:48 crc kubenswrapper[4622]: I1126 11:26:48.753252 4622 generic.go:334] "Generic (PLEG): container finished" podID="351c2fe8-f231-428a-b0bf-bc8642091b55" containerID="59ddba8aff1d87dc060000a6ce208a22cd799cc69031d90bdba68314491abd62" exitCode=0 Nov 26 11:26:48 crc kubenswrapper[4622]: I1126 11:26:48.753296 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kfrqh" event={"ID":"351c2fe8-f231-428a-b0bf-bc8642091b55","Type":"ContainerDied","Data":"59ddba8aff1d87dc060000a6ce208a22cd799cc69031d90bdba68314491abd62"} Nov 26 11:26:49 crc kubenswrapper[4622]: I1126 11:26:49.906735 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:26:49 crc kubenswrapper[4622]: I1126 11:26:49.973715 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c476d78c5-44ndx"] Nov 26 11:26:49 crc kubenswrapper[4622]: I1126 11:26:49.973986 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" podUID="5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" containerName="dnsmasq-dns" containerID="cri-o://5d6e364f5d8aac961d5db3f6ff3e6343cc678fab8a13e530a04e91ea70b3d6e4" gracePeriod=10 Nov 26 11:26:50 crc kubenswrapper[4622]: I1126 11:26:50.770121 4622 generic.go:334] "Generic (PLEG): container finished" podID="5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" containerID="5d6e364f5d8aac961d5db3f6ff3e6343cc678fab8a13e530a04e91ea70b3d6e4" exitCode=0 Nov 26 11:26:50 crc kubenswrapper[4622]: I1126 11:26:50.770211 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" event={"ID":"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4","Type":"ContainerDied","Data":"5d6e364f5d8aac961d5db3f6ff3e6343cc678fab8a13e530a04e91ea70b3d6e4"} Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.079635 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.189458 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-config-data\") pod \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.189531 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-fernet-keys\") pod \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.189603 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-credential-keys\") pod \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.189703 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6hb7\" (UniqueName: \"kubernetes.io/projected/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-kube-api-access-g6hb7\") pod \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.189860 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-combined-ca-bundle\") pod \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.190242 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-scripts\") pod \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\" (UID: \"a5b78849-9d6d-4849-b71d-a1ed2a94d78e\") " Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.195649 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "a5b78849-9d6d-4849-b71d-a1ed2a94d78e" (UID: "a5b78849-9d6d-4849-b71d-a1ed2a94d78e"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.196184 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-scripts" (OuterVolumeSpecName: "scripts") pod "a5b78849-9d6d-4849-b71d-a1ed2a94d78e" (UID: "a5b78849-9d6d-4849-b71d-a1ed2a94d78e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.196339 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "a5b78849-9d6d-4849-b71d-a1ed2a94d78e" (UID: "a5b78849-9d6d-4849-b71d-a1ed2a94d78e"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.196456 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-kube-api-access-g6hb7" (OuterVolumeSpecName: "kube-api-access-g6hb7") pod "a5b78849-9d6d-4849-b71d-a1ed2a94d78e" (UID: "a5b78849-9d6d-4849-b71d-a1ed2a94d78e"). InnerVolumeSpecName "kube-api-access-g6hb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.213070 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5b78849-9d6d-4849-b71d-a1ed2a94d78e" (UID: "a5b78849-9d6d-4849-b71d-a1ed2a94d78e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.214594 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-config-data" (OuterVolumeSpecName: "config-data") pod "a5b78849-9d6d-4849-b71d-a1ed2a94d78e" (UID: "a5b78849-9d6d-4849-b71d-a1ed2a94d78e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.293470 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.293573 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.293587 4622 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.293598 4622 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.293610 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6hb7\" (UniqueName: \"kubernetes.io/projected/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-kube-api-access-g6hb7\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.293620 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5b78849-9d6d-4849-b71d-a1ed2a94d78e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.782217 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fl6xb" event={"ID":"a5b78849-9d6d-4849-b71d-a1ed2a94d78e","Type":"ContainerDied","Data":"13c1dccb2152c9e40fc30a9a99e3f82e5235bd9b3013740a9928eb6d4f80eb2f"} Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 11:26:51.782281 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="13c1dccb2152c9e40fc30a9a99e3f82e5235bd9b3013740a9928eb6d4f80eb2f" Nov 26 11:26:51 crc kubenswrapper[4622]: I1126 
11:26:51.782361 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-fl6xb" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.162770 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-fl6xb"] Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.169548 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-fl6xb"] Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.256413 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-s866n"] Nov 26 11:26:52 crc kubenswrapper[4622]: E1126 11:26:52.256942 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5b78849-9d6d-4849-b71d-a1ed2a94d78e" containerName="keystone-bootstrap" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.256966 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5b78849-9d6d-4849-b71d-a1ed2a94d78e" containerName="keystone-bootstrap" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.257174 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5b78849-9d6d-4849-b71d-a1ed2a94d78e" containerName="keystone-bootstrap" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.257869 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.262090 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.262210 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.262333 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gr2p8" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.262525 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.263141 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.272787 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-s866n"] Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.317357 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-config-data\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.317533 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-combined-ca-bundle\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.317597 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-fernet-keys\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " 
pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.317649 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpwvd\" (UniqueName: \"kubernetes.io/projected/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-kube-api-access-jpwvd\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.317704 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-credential-keys\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.317742 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-scripts\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.419746 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-combined-ca-bundle\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.419894 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-fernet-keys\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.419982 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpwvd\" (UniqueName: \"kubernetes.io/projected/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-kube-api-access-jpwvd\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.420058 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-credential-keys\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.420373 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-scripts\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.420813 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-config-data\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 
11:26:52.426454 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-scripts\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.427167 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-config-data\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.428554 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-credential-keys\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.428675 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-fernet-keys\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.429299 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-combined-ca-bundle\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.435916 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpwvd\" (UniqueName: \"kubernetes.io/projected/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-kube-api-access-jpwvd\") pod \"keystone-bootstrap-s866n\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.576404 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-s866n" Nov 26 11:26:52 crc kubenswrapper[4622]: I1126 11:26:52.717236 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5b78849-9d6d-4849-b71d-a1ed2a94d78e" path="/var/lib/kubelet/pods/a5b78849-9d6d-4849-b71d-a1ed2a94d78e/volumes" Nov 26 11:26:53 crc kubenswrapper[4622]: I1126 11:26:53.574049 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" podUID="5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.108:5353: connect: connection refused" Nov 26 11:26:54 crc kubenswrapper[4622]: E1126 11:26:54.819792 4622 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api@sha256:7dd2e0dbb6bb5a6cecd1763e43479ca8cb6a0c502534e83c8795c0da2b50e099" Nov 26 11:26:54 crc kubenswrapper[4622]: E1126 11:26:54.819986 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api@sha256:7dd2e0dbb6bb5a6cecd1763e43479ca8cb6a0c502534e83c8795c0da2b50e099,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r59dr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-2mgt7_openstack(6bd2cbb6-f985-402e-845c-ea4f8e2f970e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 26 11:26:54 crc kubenswrapper[4622]: E1126 11:26:54.821163 4622 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-2mgt7" podUID="6bd2cbb6-f985-402e-845c-ea4f8e2f970e" Nov 26 11:26:54 crc kubenswrapper[4622]: E1126 11:26:54.833927 4622 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon@sha256:65b94ff9fcd486845fb0544583bf2a973246a61a0ad32340fb92d632285f1057" Nov 26 11:26:54 crc kubenswrapper[4622]: E1126 11:26:54.834366 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon@sha256:65b94ff9fcd486845fb0544583bf2a973246a61a0ad32340fb92d632285f1057,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n79h674h66fh588hf5h54dh4h647h589h68dhc4h574h89hc8h567hb4h5bh5bdhddh545hbch5c8h5d6h66dh5f5h5b7h94h69h59fh666h75h659q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ps9m9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-56c6d5c79c-x7zlj_openstack(fae52a64-3bac-4806-9497-1752f4d31307): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 26 11:26:54 crc kubenswrapper[4622]: E1126 11:26:54.837836 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon@sha256:65b94ff9fcd486845fb0544583bf2a973246a61a0ad32340fb92d632285f1057\\\"\"]" pod="openstack/horizon-56c6d5c79c-x7zlj" podUID="fae52a64-3bac-4806-9497-1752f4d31307" Nov 26 11:26:54 crc kubenswrapper[4622]: I1126 11:26:54.885590 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-kfrqh" Nov 26 11:26:54 crc kubenswrapper[4622]: I1126 11:26:54.973572 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/351c2fe8-f231-428a-b0bf-bc8642091b55-config\") pod \"351c2fe8-f231-428a-b0bf-bc8642091b55\" (UID: \"351c2fe8-f231-428a-b0bf-bc8642091b55\") " Nov 26 11:26:54 crc kubenswrapper[4622]: I1126 11:26:54.973627 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvqbw\" (UniqueName: \"kubernetes.io/projected/351c2fe8-f231-428a-b0bf-bc8642091b55-kube-api-access-dvqbw\") pod \"351c2fe8-f231-428a-b0bf-bc8642091b55\" (UID: \"351c2fe8-f231-428a-b0bf-bc8642091b55\") " Nov 26 11:26:54 crc kubenswrapper[4622]: I1126 11:26:54.973707 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/351c2fe8-f231-428a-b0bf-bc8642091b55-combined-ca-bundle\") pod \"351c2fe8-f231-428a-b0bf-bc8642091b55\" (UID: \"351c2fe8-f231-428a-b0bf-bc8642091b55\") " Nov 26 11:26:54 crc kubenswrapper[4622]: I1126 11:26:54.979485 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/351c2fe8-f231-428a-b0bf-bc8642091b55-kube-api-access-dvqbw" (OuterVolumeSpecName: "kube-api-access-dvqbw") pod "351c2fe8-f231-428a-b0bf-bc8642091b55" (UID: "351c2fe8-f231-428a-b0bf-bc8642091b55"). InnerVolumeSpecName "kube-api-access-dvqbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:26:54 crc kubenswrapper[4622]: I1126 11:26:54.994899 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/351c2fe8-f231-428a-b0bf-bc8642091b55-config" (OuterVolumeSpecName: "config") pod "351c2fe8-f231-428a-b0bf-bc8642091b55" (UID: "351c2fe8-f231-428a-b0bf-bc8642091b55"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:26:55 crc kubenswrapper[4622]: I1126 11:26:55.011087 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/351c2fe8-f231-428a-b0bf-bc8642091b55-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "351c2fe8-f231-428a-b0bf-bc8642091b55" (UID: "351c2fe8-f231-428a-b0bf-bc8642091b55"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:26:55 crc kubenswrapper[4622]: I1126 11:26:55.075215 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/351c2fe8-f231-428a-b0bf-bc8642091b55-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:55 crc kubenswrapper[4622]: I1126 11:26:55.075247 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvqbw\" (UniqueName: \"kubernetes.io/projected/351c2fe8-f231-428a-b0bf-bc8642091b55-kube-api-access-dvqbw\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:55 crc kubenswrapper[4622]: I1126 11:26:55.075258 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/351c2fe8-f231-428a-b0bf-bc8642091b55-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:26:55 crc kubenswrapper[4622]: I1126 11:26:55.815987 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kfrqh" event={"ID":"351c2fe8-f231-428a-b0bf-bc8642091b55","Type":"ContainerDied","Data":"d6713a290ba6a91000f286cf02b8700116a08747b0653a17c189eb88a641d105"} Nov 26 11:26:55 crc kubenswrapper[4622]: I1126 11:26:55.816311 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6713a290ba6a91000f286cf02b8700116a08747b0653a17c189eb88a641d105" Nov 26 11:26:55 crc kubenswrapper[4622]: I1126 11:26:55.816037 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kfrqh" Nov 26 11:26:55 crc kubenswrapper[4622]: E1126 11:26:55.818014 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api@sha256:7dd2e0dbb6bb5a6cecd1763e43479ca8cb6a0c502534e83c8795c0da2b50e099\\\"\"" pod="openstack/placement-db-sync-2mgt7" podUID="6bd2cbb6-f985-402e-845c-ea4f8e2f970e" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.106858 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6677d66f85-cm5jg"] Nov 26 11:26:56 crc kubenswrapper[4622]: E1126 11:26:56.107310 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="351c2fe8-f231-428a-b0bf-bc8642091b55" containerName="neutron-db-sync" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.107327 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="351c2fe8-f231-428a-b0bf-bc8642091b55" containerName="neutron-db-sync" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.107546 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="351c2fe8-f231-428a-b0bf-bc8642091b55" containerName="neutron-db-sync" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.111794 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.122604 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6677d66f85-cm5jg"] Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.195621 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-config\") pod \"dnsmasq-dns-6677d66f85-cm5jg\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") " pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.195668 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-dns-svc\") pod \"dnsmasq-dns-6677d66f85-cm5jg\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") " pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.195729 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-ovsdbserver-sb\") pod \"dnsmasq-dns-6677d66f85-cm5jg\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") " pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.196176 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6v587\" (UniqueName: \"kubernetes.io/projected/0f5a5dad-9d9e-439d-9946-43da9af3caf2-kube-api-access-6v587\") pod \"dnsmasq-dns-6677d66f85-cm5jg\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") " pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.196274 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-ovsdbserver-nb\") pod \"dnsmasq-dns-6677d66f85-cm5jg\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") " pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.261017 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-78d76c8f88-kbpmq"] Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.262925 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.264644 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.268864 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-th2pj" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.269052 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.269185 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.271841 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-78d76c8f88-kbpmq"] Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.298124 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-config\") pod \"dnsmasq-dns-6677d66f85-cm5jg\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") " pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.298174 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-dns-svc\") pod \"dnsmasq-dns-6677d66f85-cm5jg\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") " pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.298224 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-ovsdbserver-sb\") pod \"dnsmasq-dns-6677d66f85-cm5jg\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") " pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.298285 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-combined-ca-bundle\") pod \"neutron-78d76c8f88-kbpmq\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.298318 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-ovndb-tls-certs\") pod \"neutron-78d76c8f88-kbpmq\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.298416 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-httpd-config\") pod \"neutron-78d76c8f88-kbpmq\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.298443 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-config\") pod \"neutron-78d76c8f88-kbpmq\" (UID: 
\"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.298477 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6v587\" (UniqueName: \"kubernetes.io/projected/0f5a5dad-9d9e-439d-9946-43da9af3caf2-kube-api-access-6v587\") pod \"dnsmasq-dns-6677d66f85-cm5jg\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") " pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.298538 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-ovsdbserver-nb\") pod \"dnsmasq-dns-6677d66f85-cm5jg\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") " pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.298582 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9h4kd\" (UniqueName: \"kubernetes.io/projected/4666c67e-2196-45f5-a34d-f6d08a51fd67-kube-api-access-9h4kd\") pod \"neutron-78d76c8f88-kbpmq\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.299109 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-config\") pod \"dnsmasq-dns-6677d66f85-cm5jg\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") " pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.299143 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-ovsdbserver-sb\") pod \"dnsmasq-dns-6677d66f85-cm5jg\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") " pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.299916 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-ovsdbserver-nb\") pod \"dnsmasq-dns-6677d66f85-cm5jg\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") " pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.300177 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-dns-svc\") pod \"dnsmasq-dns-6677d66f85-cm5jg\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") " pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.314596 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6v587\" (UniqueName: \"kubernetes.io/projected/0f5a5dad-9d9e-439d-9946-43da9af3caf2-kube-api-access-6v587\") pod \"dnsmasq-dns-6677d66f85-cm5jg\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") " pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.399815 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-ovndb-tls-certs\") pod \"neutron-78d76c8f88-kbpmq\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " 
pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.399951 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-httpd-config\") pod \"neutron-78d76c8f88-kbpmq\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.399985 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-config\") pod \"neutron-78d76c8f88-kbpmq\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.400088 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9h4kd\" (UniqueName: \"kubernetes.io/projected/4666c67e-2196-45f5-a34d-f6d08a51fd67-kube-api-access-9h4kd\") pod \"neutron-78d76c8f88-kbpmq\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.400237 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-combined-ca-bundle\") pod \"neutron-78d76c8f88-kbpmq\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.403981 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-config\") pod \"neutron-78d76c8f88-kbpmq\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.404287 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-ovndb-tls-certs\") pod \"neutron-78d76c8f88-kbpmq\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.404312 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-httpd-config\") pod \"neutron-78d76c8f88-kbpmq\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.404744 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-combined-ca-bundle\") pod \"neutron-78d76c8f88-kbpmq\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.413233 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9h4kd\" (UniqueName: \"kubernetes.io/projected/4666c67e-2196-45f5-a34d-f6d08a51fd67-kube-api-access-9h4kd\") pod \"neutron-78d76c8f88-kbpmq\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.450377 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:26:56 crc kubenswrapper[4622]: I1126 11:26:56.582847 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.717434 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6b5456649c-2jjn8"] Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.718960 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.720522 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6b5456649c-2jjn8"] Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.721049 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.721335 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.747911 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-combined-ca-bundle\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.747965 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-internal-tls-certs\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.748013 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4x8jq\" (UniqueName: \"kubernetes.io/projected/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-kube-api-access-4x8jq\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.748048 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-public-tls-certs\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.748241 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-config\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.748350 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-ovndb-tls-certs\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.748369 
4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-httpd-config\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.850286 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-config\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.850344 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-ovndb-tls-certs\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.850366 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-httpd-config\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.850464 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-combined-ca-bundle\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.850495 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-internal-tls-certs\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.850549 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4x8jq\" (UniqueName: \"kubernetes.io/projected/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-kube-api-access-4x8jq\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.850581 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-public-tls-certs\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.856379 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-httpd-config\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.856413 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-internal-tls-certs\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.856424 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-combined-ca-bundle\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.856776 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-public-tls-certs\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.857522 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-ovndb-tls-certs\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.857866 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-config\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:58 crc kubenswrapper[4622]: I1126 11:26:58.866282 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4x8jq\" (UniqueName: \"kubernetes.io/projected/86ef3a8d-79f3-49f4-8e47-cf74f7618f66-kube-api-access-4x8jq\") pod \"neutron-6b5456649c-2jjn8\" (UID: \"86ef3a8d-79f3-49f4-8e47-cf74f7618f66\") " pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:26:59 crc kubenswrapper[4622]: I1126 11:26:59.054798 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.485310 4622 scope.go:117] "RemoveContainer" containerID="97be94336e042df08a34ff8fdfd755d8744de662b262291d72163feffa610ff1" Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.548794 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.611558 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcd5p\" (UniqueName: \"kubernetes.io/projected/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-kube-api-access-xcd5p\") pod \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.611637 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-ovsdbserver-sb\") pod \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.611731 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-ovsdbserver-nb\") pod \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.611754 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-dns-svc\") pod \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.611777 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-config\") pod \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\" (UID: \"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4\") " Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.624073 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-kube-api-access-xcd5p" (OuterVolumeSpecName: "kube-api-access-xcd5p") pod "5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" (UID: "5b9f8eb9-3de8-420c-95a7-af73fc7b33b4"). InnerVolumeSpecName "kube-api-access-xcd5p". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.645735 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" (UID: "5b9f8eb9-3de8-420c-95a7-af73fc7b33b4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.646233 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" (UID: "5b9f8eb9-3de8-420c-95a7-af73fc7b33b4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.657181 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-config" (OuterVolumeSpecName: "config") pod "5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" (UID: "5b9f8eb9-3de8-420c-95a7-af73fc7b33b4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.662894 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" (UID: "5b9f8eb9-3de8-420c-95a7-af73fc7b33b4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.715122 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.715218 4622 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.715234 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.715250 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcd5p\" (UniqueName: \"kubernetes.io/projected/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-kube-api-access-xcd5p\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.715264 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.872667 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.872674 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" event={"ID":"5b9f8eb9-3de8-420c-95a7-af73fc7b33b4","Type":"ContainerDied","Data":"d52db22aeabd63db6b2848bb8182dcea5b14a5187f83467672d30309bc1b8797"} Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.908012 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c476d78c5-44ndx"] Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.916796 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c476d78c5-44ndx"] Nov 26 11:27:01 crc kubenswrapper[4622]: E1126 11:27:01.932082 4622 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:4c93a5cccb9971e24f05daf93b3aa11ba71752bc3469a1a1a2c4906f92f69645" Nov 26 11:27:01 crc kubenswrapper[4622]: E1126 11:27:01.932290 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:4c93a5cccb9971e24f05daf93b3aa11ba71752bc3469a1a1a2c4906f92f69645,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6mdln,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-j8fgr_openstack(99de607d-316d-4435-b18a-c6eeb950da19): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 26 11:27:01 crc kubenswrapper[4622]: E1126 11:27:01.933395 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-j8fgr" podUID="99de607d-316d-4435-b18a-c6eeb950da19" Nov 26 11:27:01 crc kubenswrapper[4622]: I1126 11:27:01.942738 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.019902 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ps9m9\" (UniqueName: \"kubernetes.io/projected/fae52a64-3bac-4806-9497-1752f4d31307-kube-api-access-ps9m9\") pod \"fae52a64-3bac-4806-9497-1752f4d31307\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.020175 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fae52a64-3bac-4806-9497-1752f4d31307-config-data\") pod \"fae52a64-3bac-4806-9497-1752f4d31307\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.020204 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fae52a64-3bac-4806-9497-1752f4d31307-logs\") pod \"fae52a64-3bac-4806-9497-1752f4d31307\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.020362 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fae52a64-3bac-4806-9497-1752f4d31307-horizon-secret-key\") pod \"fae52a64-3bac-4806-9497-1752f4d31307\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.020414 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fae52a64-3bac-4806-9497-1752f4d31307-scripts\") pod \"fae52a64-3bac-4806-9497-1752f4d31307\" (UID: \"fae52a64-3bac-4806-9497-1752f4d31307\") " Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.020984 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fae52a64-3bac-4806-9497-1752f4d31307-logs" (OuterVolumeSpecName: "logs") pod "fae52a64-3bac-4806-9497-1752f4d31307" (UID: "fae52a64-3bac-4806-9497-1752f4d31307"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.021703 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fae52a64-3bac-4806-9497-1752f4d31307-config-data" (OuterVolumeSpecName: "config-data") pod "fae52a64-3bac-4806-9497-1752f4d31307" (UID: "fae52a64-3bac-4806-9497-1752f4d31307"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.022186 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fae52a64-3bac-4806-9497-1752f4d31307-scripts" (OuterVolumeSpecName: "scripts") pod "fae52a64-3bac-4806-9497-1752f4d31307" (UID: "fae52a64-3bac-4806-9497-1752f4d31307"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.026551 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fae52a64-3bac-4806-9497-1752f4d31307-kube-api-access-ps9m9" (OuterVolumeSpecName: "kube-api-access-ps9m9") pod "fae52a64-3bac-4806-9497-1752f4d31307" (UID: "fae52a64-3bac-4806-9497-1752f4d31307"). InnerVolumeSpecName "kube-api-access-ps9m9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.027061 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fae52a64-3bac-4806-9497-1752f4d31307-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "fae52a64-3bac-4806-9497-1752f4d31307" (UID: "fae52a64-3bac-4806-9497-1752f4d31307"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.121573 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fae52a64-3bac-4806-9497-1752f4d31307-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.121728 4622 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fae52a64-3bac-4806-9497-1752f4d31307-logs\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.121820 4622 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fae52a64-3bac-4806-9497-1752f4d31307-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.121884 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fae52a64-3bac-4806-9497-1752f4d31307-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.121951 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ps9m9\" (UniqueName: \"kubernetes.io/projected/fae52a64-3bac-4806-9497-1752f4d31307-kube-api-access-ps9m9\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.714244 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" path="/var/lib/kubelet/pods/5b9f8eb9-3de8-420c-95a7-af73fc7b33b4/volumes" Nov 26 11:27:02 crc kubenswrapper[4622]: E1126 11:27:02.755941 4622 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:37d64e0a00c54e71a4c1fcbbbf7e832f6886ffd03c9a02b6ee3ca48fabc30879" Nov 26 11:27:02 crc kubenswrapper[4622]: E1126 11:27:02.756125 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:37d64e0a00c54e71a4c1fcbbbf7e832f6886ffd03c9a02b6ee3ca48fabc30879,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vsct9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-sq2s5_openstack(57b0b68f-f25e-417e-ae0f-55d2361b8df6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 26 11:27:02 crc kubenswrapper[4622]: E1126 11:27:02.757484 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-sq2s5" podUID="57b0b68f-f25e-417e-ae0f-55d2361b8df6" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.791025 4622 scope.go:117] "RemoveContainer" containerID="eaf5a80b758aeed16854c20ca0661cb3202c5a9fbf20f8f7eaba325e5a63657f" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.916666 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-56c6d5c79c-x7zlj" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.916731 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56c6d5c79c-x7zlj" event={"ID":"fae52a64-3bac-4806-9497-1752f4d31307","Type":"ContainerDied","Data":"e454790ad2d727f17cfeaa7943d1a765426828908cde0e847b3f2438ce7d6e02"} Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.922815 4622 scope.go:117] "RemoveContainer" containerID="173617202ec041b2a2e516a32e3e6d30336845b726e0f3586e26d933c2a299bd" Nov 26 11:27:02 crc kubenswrapper[4622]: E1126 11:27:02.923355 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"173617202ec041b2a2e516a32e3e6d30336845b726e0f3586e26d933c2a299bd\": container with ID starting with 173617202ec041b2a2e516a32e3e6d30336845b726e0f3586e26d933c2a299bd not found: ID does not exist" containerID="173617202ec041b2a2e516a32e3e6d30336845b726e0f3586e26d933c2a299bd" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.923392 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"173617202ec041b2a2e516a32e3e6d30336845b726e0f3586e26d933c2a299bd"} err="failed to get container status \"173617202ec041b2a2e516a32e3e6d30336845b726e0f3586e26d933c2a299bd\": rpc error: code = NotFound desc = could not find container \"173617202ec041b2a2e516a32e3e6d30336845b726e0f3586e26d933c2a299bd\": container with ID starting with 173617202ec041b2a2e516a32e3e6d30336845b726e0f3586e26d933c2a299bd not found: ID does not exist" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.923415 4622 scope.go:117] "RemoveContainer" containerID="97be94336e042df08a34ff8fdfd755d8744de662b262291d72163feffa610ff1" Nov 26 11:27:02 crc kubenswrapper[4622]: E1126 11:27:02.923620 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:4c93a5cccb9971e24f05daf93b3aa11ba71752bc3469a1a1a2c4906f92f69645\\\"\"" pod="openstack/barbican-db-sync-j8fgr" podUID="99de607d-316d-4435-b18a-c6eeb950da19" Nov 26 11:27:02 crc kubenswrapper[4622]: E1126 11:27:02.923677 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97be94336e042df08a34ff8fdfd755d8744de662b262291d72163feffa610ff1\": container with ID starting with 97be94336e042df08a34ff8fdfd755d8744de662b262291d72163feffa610ff1 not found: ID does not exist" containerID="97be94336e042df08a34ff8fdfd755d8744de662b262291d72163feffa610ff1" Nov 26 11:27:02 crc kubenswrapper[4622]: E1126 11:27:02.923707 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:37d64e0a00c54e71a4c1fcbbbf7e832f6886ffd03c9a02b6ee3ca48fabc30879\\\"\"" pod="openstack/cinder-db-sync-sq2s5" podUID="57b0b68f-f25e-417e-ae0f-55d2361b8df6" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.923715 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97be94336e042df08a34ff8fdfd755d8744de662b262291d72163feffa610ff1"} err="failed to get container status \"97be94336e042df08a34ff8fdfd755d8744de662b262291d72163feffa610ff1\": rpc error: code = NotFound desc = could not find container 
\"97be94336e042df08a34ff8fdfd755d8744de662b262291d72163feffa610ff1\": container with ID starting with 97be94336e042df08a34ff8fdfd755d8744de662b262291d72163feffa610ff1 not found: ID does not exist" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.923742 4622 scope.go:117] "RemoveContainer" containerID="eaf5a80b758aeed16854c20ca0661cb3202c5a9fbf20f8f7eaba325e5a63657f" Nov 26 11:27:02 crc kubenswrapper[4622]: E1126 11:27:02.924193 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eaf5a80b758aeed16854c20ca0661cb3202c5a9fbf20f8f7eaba325e5a63657f\": container with ID starting with eaf5a80b758aeed16854c20ca0661cb3202c5a9fbf20f8f7eaba325e5a63657f not found: ID does not exist" containerID="eaf5a80b758aeed16854c20ca0661cb3202c5a9fbf20f8f7eaba325e5a63657f" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.924222 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaf5a80b758aeed16854c20ca0661cb3202c5a9fbf20f8f7eaba325e5a63657f"} err="failed to get container status \"eaf5a80b758aeed16854c20ca0661cb3202c5a9fbf20f8f7eaba325e5a63657f\": rpc error: code = NotFound desc = could not find container \"eaf5a80b758aeed16854c20ca0661cb3202c5a9fbf20f8f7eaba325e5a63657f\": container with ID starting with eaf5a80b758aeed16854c20ca0661cb3202c5a9fbf20f8f7eaba325e5a63657f not found: ID does not exist" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.924238 4622 scope.go:117] "RemoveContainer" containerID="5d6e364f5d8aac961d5db3f6ff3e6343cc678fab8a13e530a04e91ea70b3d6e4" Nov 26 11:27:02 crc kubenswrapper[4622]: I1126 11:27:02.967908 4622 scope.go:117] "RemoveContainer" containerID="9fbf402d2fc3e031a7ccc56c7097aa3524374c4b432d7196cc0fe28600c6638c" Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.033658 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-56c6d5c79c-x7zlj"] Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.041244 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-56c6d5c79c-x7zlj"] Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.309764 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-686b8b87c4-gqg7z"] Nov 26 11:27:03 crc kubenswrapper[4622]: W1126 11:27:03.312004 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85b663a5_e13e_4653_a4bb_340952a968c9.slice/crio-b2cb0cbcbebd97542e946031d9909db16c75abf625bd93306c5d0c28ad369d4a WatchSource:0}: Error finding container b2cb0cbcbebd97542e946031d9909db16c75abf625bd93306c5d0c28ad369d4a: Status 404 returned error can't find the container with id b2cb0cbcbebd97542e946031d9909db16c75abf625bd93306c5d0c28ad369d4a Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.517018 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-s866n"] Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.525908 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-bc6fbfc58-jv7cz"] Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.535310 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6677d66f85-cm5jg"] Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.574604 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c476d78c5-44ndx" podUID="5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" containerName="dnsmasq-dns" 
probeResult="failure" output="dial tcp 10.217.0.108:5353: i/o timeout" Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.615369 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-78d76c8f88-kbpmq"] Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.937265 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0d0cf662-0c27-42e7-842a-2479b180995f","Type":"ContainerStarted","Data":"7478c478b07c3a92aceadea3f7440c84667ca495b4cf664b452d092fdc7b23d8"} Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.939628 4622 generic.go:334] "Generic (PLEG): container finished" podID="0f5a5dad-9d9e-439d-9946-43da9af3caf2" containerID="ffba0813d39211460a07d9daf8698b3945207629995d159c6ec1061aeef43da1" exitCode=0 Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.939679 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" event={"ID":"0f5a5dad-9d9e-439d-9946-43da9af3caf2","Type":"ContainerDied","Data":"ffba0813d39211460a07d9daf8698b3945207629995d159c6ec1061aeef43da1"} Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.939699 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" event={"ID":"0f5a5dad-9d9e-439d-9946-43da9af3caf2","Type":"ContainerStarted","Data":"94781550ec460540bbdffb095bc0344cc94fdca2e36777b915fa1ae31b8cb67b"} Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.955828 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76c94b49cc-l575g" event={"ID":"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2","Type":"ContainerStarted","Data":"ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7"} Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.955897 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76c94b49cc-l575g" event={"ID":"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2","Type":"ContainerStarted","Data":"00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb"} Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.955988 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-76c94b49cc-l575g" podUID="4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" containerName="horizon-log" containerID="cri-o://00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb" gracePeriod=30 Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.956109 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-76c94b49cc-l575g" podUID="4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" containerName="horizon" containerID="cri-o://ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7" gracePeriod=30 Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.966633 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-868fc87bbf-49ntq" podUID="f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" containerName="horizon" containerID="cri-o://24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf" gracePeriod=30 Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.966634 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-868fc87bbf-49ntq" podUID="f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" containerName="horizon-log" containerID="cri-o://a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e" gracePeriod=30 Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.970530 4622 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/horizon-868fc87bbf-49ntq" event={"ID":"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0","Type":"ContainerStarted","Data":"24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf"} Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.970585 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-868fc87bbf-49ntq" event={"ID":"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0","Type":"ContainerStarted","Data":"a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e"} Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.970598 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-s866n" event={"ID":"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1","Type":"ContainerStarted","Data":"4b4b4dc95ecc57fcd244dcf874d755e7b0c55f3a0d6b2066b31c5b6b9ddfdef8"} Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.970609 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-s866n" event={"ID":"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1","Type":"ContainerStarted","Data":"5f14434bcdecd770189839cb686798fbda78f28764a45589b42aa25ab6a79c9d"} Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.975893 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-76c94b49cc-l575g" podStartSLOduration=2.399275129 podStartE2EDuration="22.975881337s" podCreationTimestamp="2025-11-26 11:26:41 +0000 UTC" firstStartedPulling="2025-11-26 11:26:42.178634574 +0000 UTC m=+961.769846096" lastFinishedPulling="2025-11-26 11:27:02.755240782 +0000 UTC m=+982.346452304" observedRunningTime="2025-11-26 11:27:03.973736851 +0000 UTC m=+983.564948373" watchObservedRunningTime="2025-11-26 11:27:03.975881337 +0000 UTC m=+983.567092859" Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.987761 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-bc6fbfc58-jv7cz" event={"ID":"2285a60b-0202-4abd-91de-7241e109804f","Type":"ContainerStarted","Data":"b745e04d568770282602d2e8de573615c18846d7d76022bdd232c76af56a336b"} Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.987809 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-bc6fbfc58-jv7cz" event={"ID":"2285a60b-0202-4abd-91de-7241e109804f","Type":"ContainerStarted","Data":"2c3190f936d81b7519391adc5010d992436533b9a340db2aaf6da4cfa47f7504"} Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.993130 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-s866n" podStartSLOduration=11.993111819 podStartE2EDuration="11.993111819s" podCreationTimestamp="2025-11-26 11:26:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:27:03.989900369 +0000 UTC m=+983.581111891" watchObservedRunningTime="2025-11-26 11:27:03.993111819 +0000 UTC m=+983.584323341" Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.994614 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-78d76c8f88-kbpmq" event={"ID":"4666c67e-2196-45f5-a34d-f6d08a51fd67","Type":"ContainerStarted","Data":"99782de7a4c346220d786092168978d474e5e74631c27b06c163036fedbd7a1a"} Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.994641 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-78d76c8f88-kbpmq" 
event={"ID":"4666c67e-2196-45f5-a34d-f6d08a51fd67","Type":"ContainerStarted","Data":"9f3132ffd108d109a6ccd4000ac08df58f3d4e9085727ddc54d6cf6f2cabfc4a"} Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.994978 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.998347 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-686b8b87c4-gqg7z" event={"ID":"85b663a5-e13e-4653-a4bb-340952a968c9","Type":"ContainerStarted","Data":"db66f6b6e8a9d8a3409bda6d5204063e52dfe897279be8e132b9d8af69263fd6"} Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.998370 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-686b8b87c4-gqg7z" event={"ID":"85b663a5-e13e-4653-a4bb-340952a968c9","Type":"ContainerStarted","Data":"ddae576e306c48d7087832ee3088ed9b10e1389b547debbf986408ac880d263a"} Nov 26 11:27:03 crc kubenswrapper[4622]: I1126 11:27:03.998381 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-686b8b87c4-gqg7z" event={"ID":"85b663a5-e13e-4653-a4bb-340952a968c9","Type":"ContainerStarted","Data":"b2cb0cbcbebd97542e946031d9909db16c75abf625bd93306c5d0c28ad369d4a"} Nov 26 11:27:04 crc kubenswrapper[4622]: I1126 11:27:04.030622 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-868fc87bbf-49ntq" podStartSLOduration=2.519993112 podStartE2EDuration="25.030600984s" podCreationTimestamp="2025-11-26 11:26:39 +0000 UTC" firstStartedPulling="2025-11-26 11:26:40.303443997 +0000 UTC m=+959.894655519" lastFinishedPulling="2025-11-26 11:27:02.814051868 +0000 UTC m=+982.405263391" observedRunningTime="2025-11-26 11:27:04.008787316 +0000 UTC m=+983.599998837" watchObservedRunningTime="2025-11-26 11:27:04.030600984 +0000 UTC m=+983.621812506" Nov 26 11:27:04 crc kubenswrapper[4622]: I1126 11:27:04.064116 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-686b8b87c4-gqg7z" podStartSLOduration=17.064094179 podStartE2EDuration="17.064094179s" podCreationTimestamp="2025-11-26 11:26:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:27:04.056818582 +0000 UTC m=+983.648030104" watchObservedRunningTime="2025-11-26 11:27:04.064094179 +0000 UTC m=+983.655305701" Nov 26 11:27:04 crc kubenswrapper[4622]: I1126 11:27:04.065890 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-bc6fbfc58-jv7cz" podStartSLOduration=17.065883335 podStartE2EDuration="17.065883335s" podCreationTimestamp="2025-11-26 11:26:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:27:04.032948854 +0000 UTC m=+983.624160376" watchObservedRunningTime="2025-11-26 11:27:04.065883335 +0000 UTC m=+983.657094858" Nov 26 11:27:04 crc kubenswrapper[4622]: I1126 11:27:04.074407 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-78d76c8f88-kbpmq" podStartSLOduration=8.074385908 podStartE2EDuration="8.074385908s" podCreationTimestamp="2025-11-26 11:26:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:27:04.074006122 +0000 UTC m=+983.665217644" watchObservedRunningTime="2025-11-26 11:27:04.074385908 +0000 UTC 
m=+983.665597430" Nov 26 11:27:04 crc kubenswrapper[4622]: I1126 11:27:04.375520 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6b5456649c-2jjn8"] Nov 26 11:27:04 crc kubenswrapper[4622]: I1126 11:27:04.714306 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fae52a64-3bac-4806-9497-1752f4d31307" path="/var/lib/kubelet/pods/fae52a64-3bac-4806-9497-1752f4d31307/volumes" Nov 26 11:27:05 crc kubenswrapper[4622]: I1126 11:27:05.006046 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" event={"ID":"0f5a5dad-9d9e-439d-9946-43da9af3caf2","Type":"ContainerStarted","Data":"2fa86a5fe79538f8a4b27e13c35f799e9a8a45108f4c145b622950dfd6188f6f"} Nov 26 11:27:05 crc kubenswrapper[4622]: I1126 11:27:05.006333 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:27:05 crc kubenswrapper[4622]: I1126 11:27:05.012141 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-bc6fbfc58-jv7cz" event={"ID":"2285a60b-0202-4abd-91de-7241e109804f","Type":"ContainerStarted","Data":"a9fb99fe35490d93fb67a5400b5d0b68e0e626250788ba8fbf52e9be92f2150c"} Nov 26 11:27:05 crc kubenswrapper[4622]: I1126 11:27:05.015266 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-78d76c8f88-kbpmq" event={"ID":"4666c67e-2196-45f5-a34d-f6d08a51fd67","Type":"ContainerStarted","Data":"e381f265da20dd91c24199cc7523623a3c6dede208fb2d534b7ef0ae15bc7139"} Nov 26 11:27:05 crc kubenswrapper[4622]: I1126 11:27:05.033833 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" podStartSLOduration=9.033809553 podStartE2EDuration="9.033809553s" podCreationTimestamp="2025-11-26 11:26:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:27:05.026233098 +0000 UTC m=+984.617444619" watchObservedRunningTime="2025-11-26 11:27:05.033809553 +0000 UTC m=+984.625021075" Nov 26 11:27:05 crc kubenswrapper[4622]: W1126 11:27:05.146264 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod86ef3a8d_79f3_49f4_8e47_cf74f7618f66.slice/crio-8a17d769916145b00a6d9caab0542658de2f9f69f0e012a2fd6ebd568a248593 WatchSource:0}: Error finding container 8a17d769916145b00a6d9caab0542658de2f9f69f0e012a2fd6ebd568a248593: Status 404 returned error can't find the container with id 8a17d769916145b00a6d9caab0542658de2f9f69f0e012a2fd6ebd568a248593 Nov 26 11:27:06 crc kubenswrapper[4622]: I1126 11:27:06.028492 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b5456649c-2jjn8" event={"ID":"86ef3a8d-79f3-49f4-8e47-cf74f7618f66","Type":"ContainerStarted","Data":"d4a3718ccd493e72fa3fc1a2ee3400b37e450d80d2af6733e94c4b5e8d791bf6"} Nov 26 11:27:06 crc kubenswrapper[4622]: I1126 11:27:06.029605 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b5456649c-2jjn8" event={"ID":"86ef3a8d-79f3-49f4-8e47-cf74f7618f66","Type":"ContainerStarted","Data":"10a1e7f2e0416a5aec1432eae33d77b442f044e4352bad88f32a1e858c456de7"} Nov 26 11:27:06 crc kubenswrapper[4622]: I1126 11:27:06.029637 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:27:06 crc kubenswrapper[4622]: I1126 11:27:06.029668 4622 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/neutron-6b5456649c-2jjn8" event={"ID":"86ef3a8d-79f3-49f4-8e47-cf74f7618f66","Type":"ContainerStarted","Data":"8a17d769916145b00a6d9caab0542658de2f9f69f0e012a2fd6ebd568a248593"} Nov 26 11:27:06 crc kubenswrapper[4622]: I1126 11:27:06.031455 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0d0cf662-0c27-42e7-842a-2479b180995f","Type":"ContainerStarted","Data":"7f87a60c75eaeafd945b2d57a8d81e76dd9c64bdbd2821cc8b597f9309849854"} Nov 26 11:27:06 crc kubenswrapper[4622]: I1126 11:27:06.047446 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6b5456649c-2jjn8" podStartSLOduration=8.047423275 podStartE2EDuration="8.047423275s" podCreationTimestamp="2025-11-26 11:26:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:27:06.0457879 +0000 UTC m=+985.636999421" watchObservedRunningTime="2025-11-26 11:27:06.047423275 +0000 UTC m=+985.638634786" Nov 26 11:27:07 crc kubenswrapper[4622]: I1126 11:27:07.054079 4622 generic.go:334] "Generic (PLEG): container finished" podID="18f1d84a-d71d-4c14-8d60-7e3dc4061fb1" containerID="4b4b4dc95ecc57fcd244dcf874d755e7b0c55f3a0d6b2066b31c5b6b9ddfdef8" exitCode=0 Nov 26 11:27:07 crc kubenswrapper[4622]: I1126 11:27:07.054177 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-s866n" event={"ID":"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1","Type":"ContainerDied","Data":"4b4b4dc95ecc57fcd244dcf874d755e7b0c55f3a0d6b2066b31c5b6b9ddfdef8"} Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.029689 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.030064 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.126462 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.126529 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.838170 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-s866n" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.861053 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-config-data\") pod \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.861248 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-credential-keys\") pod \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.861282 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-scripts\") pod \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.861311 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-combined-ca-bundle\") pod \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.861432 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpwvd\" (UniqueName: \"kubernetes.io/projected/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-kube-api-access-jpwvd\") pod \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.862448 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-fernet-keys\") pod \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\" (UID: \"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1\") " Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.867819 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "18f1d84a-d71d-4c14-8d60-7e3dc4061fb1" (UID: "18f1d84a-d71d-4c14-8d60-7e3dc4061fb1"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.868484 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "18f1d84a-d71d-4c14-8d60-7e3dc4061fb1" (UID: "18f1d84a-d71d-4c14-8d60-7e3dc4061fb1"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.868553 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-kube-api-access-jpwvd" (OuterVolumeSpecName: "kube-api-access-jpwvd") pod "18f1d84a-d71d-4c14-8d60-7e3dc4061fb1" (UID: "18f1d84a-d71d-4c14-8d60-7e3dc4061fb1"). InnerVolumeSpecName "kube-api-access-jpwvd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.881004 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-scripts" (OuterVolumeSpecName: "scripts") pod "18f1d84a-d71d-4c14-8d60-7e3dc4061fb1" (UID: "18f1d84a-d71d-4c14-8d60-7e3dc4061fb1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.892417 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "18f1d84a-d71d-4c14-8d60-7e3dc4061fb1" (UID: "18f1d84a-d71d-4c14-8d60-7e3dc4061fb1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.909912 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-config-data" (OuterVolumeSpecName: "config-data") pod "18f1d84a-d71d-4c14-8d60-7e3dc4061fb1" (UID: "18f1d84a-d71d-4c14-8d60-7e3dc4061fb1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.964306 4622 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.964341 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.964352 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.964363 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpwvd\" (UniqueName: \"kubernetes.io/projected/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-kube-api-access-jpwvd\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.964375 4622 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:08 crc kubenswrapper[4622]: I1126 11:27:08.964382 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.077319 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-s866n" event={"ID":"18f1d84a-d71d-4c14-8d60-7e3dc4061fb1","Type":"ContainerDied","Data":"5f14434bcdecd770189839cb686798fbda78f28764a45589b42aa25ab6a79c9d"} Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.077377 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f14434bcdecd770189839cb686798fbda78f28764a45589b42aa25ab6a79c9d" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.077445 4622 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-s866n" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.172127 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5b79cbd499-vlhtt"] Nov 26 11:27:09 crc kubenswrapper[4622]: E1126 11:27:09.172682 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18f1d84a-d71d-4c14-8d60-7e3dc4061fb1" containerName="keystone-bootstrap" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.172708 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="18f1d84a-d71d-4c14-8d60-7e3dc4061fb1" containerName="keystone-bootstrap" Nov 26 11:27:09 crc kubenswrapper[4622]: E1126 11:27:09.172743 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" containerName="init" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.172750 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" containerName="init" Nov 26 11:27:09 crc kubenswrapper[4622]: E1126 11:27:09.172777 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" containerName="dnsmasq-dns" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.172784 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" containerName="dnsmasq-dns" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.172986 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b9f8eb9-3de8-420c-95a7-af73fc7b33b4" containerName="dnsmasq-dns" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.173008 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="18f1d84a-d71d-4c14-8d60-7e3dc4061fb1" containerName="keystone-bootstrap" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.173815 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.176479 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gr2p8" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.176564 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.176742 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.176966 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.177163 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.177440 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.181817 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5b79cbd499-vlhtt"] Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.275646 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nr8hp\" (UniqueName: \"kubernetes.io/projected/eccaaa42-60dc-4995-b79e-c11979b75650-kube-api-access-nr8hp\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.275697 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-public-tls-certs\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.275719 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-combined-ca-bundle\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.275889 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-scripts\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.275938 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-credential-keys\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.275964 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-internal-tls-certs\") pod \"keystone-5b79cbd499-vlhtt\" 
(UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.276007 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-fernet-keys\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.276150 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-config-data\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.378924 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-config-data\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.379241 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nr8hp\" (UniqueName: \"kubernetes.io/projected/eccaaa42-60dc-4995-b79e-c11979b75650-kube-api-access-nr8hp\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.379264 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-public-tls-certs\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.379281 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-combined-ca-bundle\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.379332 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-scripts\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.379354 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-credential-keys\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.379373 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-internal-tls-certs\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " 
pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.379401 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-fernet-keys\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.386426 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-credential-keys\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.386519 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-config-data\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.386721 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-scripts\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.386915 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-public-tls-certs\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.388025 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-internal-tls-certs\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.389128 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-combined-ca-bundle\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.390031 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eccaaa42-60dc-4995-b79e-c11979b75650-fernet-keys\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.394907 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nr8hp\" (UniqueName: \"kubernetes.io/projected/eccaaa42-60dc-4995-b79e-c11979b75650-kube-api-access-nr8hp\") pod \"keystone-5b79cbd499-vlhtt\" (UID: \"eccaaa42-60dc-4995-b79e-c11979b75650\") " pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.505125 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:09 crc kubenswrapper[4622]: I1126 11:27:09.689317 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:27:11 crc kubenswrapper[4622]: I1126 11:27:11.452752 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:27:11 crc kubenswrapper[4622]: I1126 11:27:11.507687 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66f4bdbdb7-bq6zq"] Nov 26 11:27:11 crc kubenswrapper[4622]: I1126 11:27:11.507932 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" podUID="01c222b2-900b-401d-8ec2-fb9889927110" containerName="dnsmasq-dns" containerID="cri-o://7f4073bd5445cfe12e2cdfaa00835c8cb1ddf71fd0c3f9f861ad26f0cdbecf47" gracePeriod=10 Nov 26 11:27:11 crc kubenswrapper[4622]: I1126 11:27:11.537937 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.111179 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.113312 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0d0cf662-0c27-42e7-842a-2479b180995f","Type":"ContainerStarted","Data":"71b363fb3a8e0b288879ed4d09d0007b7f68e4490b8cbf34377593ef1884ce3b"} Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.115676 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-2mgt7" event={"ID":"6bd2cbb6-f985-402e-845c-ea4f8e2f970e","Type":"ContainerStarted","Data":"288da523a4df50e70f490a81f781f5ff9e80284482b2b2e02c8f5e38ecdc533b"} Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.121806 4622 generic.go:334] "Generic (PLEG): container finished" podID="01c222b2-900b-401d-8ec2-fb9889927110" containerID="7f4073bd5445cfe12e2cdfaa00835c8cb1ddf71fd0c3f9f861ad26f0cdbecf47" exitCode=0 Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.121841 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" event={"ID":"01c222b2-900b-401d-8ec2-fb9889927110","Type":"ContainerDied","Data":"7f4073bd5445cfe12e2cdfaa00835c8cb1ddf71fd0c3f9f861ad26f0cdbecf47"} Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.121861 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" event={"ID":"01c222b2-900b-401d-8ec2-fb9889927110","Type":"ContainerDied","Data":"e6c7d898c775ccf85d609340977127318c879e6ee17cf5fc8d1f95ec2925d6ac"} Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.121881 4622 scope.go:117] "RemoveContainer" containerID="7f4073bd5445cfe12e2cdfaa00835c8cb1ddf71fd0c3f9f861ad26f0cdbecf47" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.121989 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66f4bdbdb7-bq6zq" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.160829 4622 scope.go:117] "RemoveContainer" containerID="e090d2c5bf319a1f1f1df8240949bf5385ad13099a7ab5887f396cd98cf8416c" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.197590 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-2mgt7" podStartSLOduration=3.089378182 podStartE2EDuration="33.197567692s" podCreationTimestamp="2025-11-26 11:26:39 +0000 UTC" firstStartedPulling="2025-11-26 11:26:40.549512486 +0000 UTC m=+960.140724007" lastFinishedPulling="2025-11-26 11:27:10.657701995 +0000 UTC m=+990.248913517" observedRunningTime="2025-11-26 11:27:12.16663043 +0000 UTC m=+991.757841952" watchObservedRunningTime="2025-11-26 11:27:12.197567692 +0000 UTC m=+991.788779214" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.220077 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5b79cbd499-vlhtt"] Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.222745 4622 scope.go:117] "RemoveContainer" containerID="7f4073bd5445cfe12e2cdfaa00835c8cb1ddf71fd0c3f9f861ad26f0cdbecf47" Nov 26 11:27:12 crc kubenswrapper[4622]: E1126 11:27:12.230640 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f4073bd5445cfe12e2cdfaa00835c8cb1ddf71fd0c3f9f861ad26f0cdbecf47\": container with ID starting with 7f4073bd5445cfe12e2cdfaa00835c8cb1ddf71fd0c3f9f861ad26f0cdbecf47 not found: ID does not exist" containerID="7f4073bd5445cfe12e2cdfaa00835c8cb1ddf71fd0c3f9f861ad26f0cdbecf47" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.230694 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f4073bd5445cfe12e2cdfaa00835c8cb1ddf71fd0c3f9f861ad26f0cdbecf47"} err="failed to get container status \"7f4073bd5445cfe12e2cdfaa00835c8cb1ddf71fd0c3f9f861ad26f0cdbecf47\": rpc error: code = NotFound desc = could not find container \"7f4073bd5445cfe12e2cdfaa00835c8cb1ddf71fd0c3f9f861ad26f0cdbecf47\": container with ID starting with 7f4073bd5445cfe12e2cdfaa00835c8cb1ddf71fd0c3f9f861ad26f0cdbecf47 not found: ID does not exist" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.230725 4622 scope.go:117] "RemoveContainer" containerID="e090d2c5bf319a1f1f1df8240949bf5385ad13099a7ab5887f396cd98cf8416c" Nov 26 11:27:12 crc kubenswrapper[4622]: E1126 11:27:12.231484 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e090d2c5bf319a1f1f1df8240949bf5385ad13099a7ab5887f396cd98cf8416c\": container with ID starting with e090d2c5bf319a1f1f1df8240949bf5385ad13099a7ab5887f396cd98cf8416c not found: ID does not exist" containerID="e090d2c5bf319a1f1f1df8240949bf5385ad13099a7ab5887f396cd98cf8416c" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.231533 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e090d2c5bf319a1f1f1df8240949bf5385ad13099a7ab5887f396cd98cf8416c"} err="failed to get container status \"e090d2c5bf319a1f1f1df8240949bf5385ad13099a7ab5887f396cd98cf8416c\": rpc error: code = NotFound desc = could not find container \"e090d2c5bf319a1f1f1df8240949bf5385ad13099a7ab5887f396cd98cf8416c\": container with ID starting with e090d2c5bf319a1f1f1df8240949bf5385ad13099a7ab5887f396cd98cf8416c not found: ID does not exist" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 
11:27:12.242277 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-ovsdbserver-nb\") pod \"01c222b2-900b-401d-8ec2-fb9889927110\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.242749 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-ovsdbserver-sb\") pod \"01c222b2-900b-401d-8ec2-fb9889927110\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.242794 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njlfs\" (UniqueName: \"kubernetes.io/projected/01c222b2-900b-401d-8ec2-fb9889927110-kube-api-access-njlfs\") pod \"01c222b2-900b-401d-8ec2-fb9889927110\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.242892 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-config\") pod \"01c222b2-900b-401d-8ec2-fb9889927110\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.242918 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-dns-svc\") pod \"01c222b2-900b-401d-8ec2-fb9889927110\" (UID: \"01c222b2-900b-401d-8ec2-fb9889927110\") " Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.255893 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01c222b2-900b-401d-8ec2-fb9889927110-kube-api-access-njlfs" (OuterVolumeSpecName: "kube-api-access-njlfs") pod "01c222b2-900b-401d-8ec2-fb9889927110" (UID: "01c222b2-900b-401d-8ec2-fb9889927110"). InnerVolumeSpecName "kube-api-access-njlfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.296779 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "01c222b2-900b-401d-8ec2-fb9889927110" (UID: "01c222b2-900b-401d-8ec2-fb9889927110"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.311875 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-config" (OuterVolumeSpecName: "config") pod "01c222b2-900b-401d-8ec2-fb9889927110" (UID: "01c222b2-900b-401d-8ec2-fb9889927110"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.321487 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "01c222b2-900b-401d-8ec2-fb9889927110" (UID: "01c222b2-900b-401d-8ec2-fb9889927110"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.322764 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "01c222b2-900b-401d-8ec2-fb9889927110" (UID: "01c222b2-900b-401d-8ec2-fb9889927110"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.345123 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.345156 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njlfs\" (UniqueName: \"kubernetes.io/projected/01c222b2-900b-401d-8ec2-fb9889927110-kube-api-access-njlfs\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.345181 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.345191 4622 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.345199 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/01c222b2-900b-401d-8ec2-fb9889927110-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.463024 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66f4bdbdb7-bq6zq"] Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.473444 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-66f4bdbdb7-bq6zq"] Nov 26 11:27:12 crc kubenswrapper[4622]: I1126 11:27:12.714557 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01c222b2-900b-401d-8ec2-fb9889927110" path="/var/lib/kubelet/pods/01c222b2-900b-401d-8ec2-fb9889927110/volumes" Nov 26 11:27:13 crc kubenswrapper[4622]: I1126 11:27:13.133797 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5b79cbd499-vlhtt" event={"ID":"eccaaa42-60dc-4995-b79e-c11979b75650","Type":"ContainerStarted","Data":"94ec181a639699622f094c27769bd2776b0d9dd226c1e1764d3a3a58a1266fbc"} Nov 26 11:27:13 crc kubenswrapper[4622]: I1126 11:27:13.133867 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5b79cbd499-vlhtt" event={"ID":"eccaaa42-60dc-4995-b79e-c11979b75650","Type":"ContainerStarted","Data":"c695aeffce448e05537e7a70ca5b862cf19e8e66ea4e88c29d3bfc856f3e36ae"} Nov 26 11:27:13 crc kubenswrapper[4622]: I1126 11:27:13.133891 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:13 crc kubenswrapper[4622]: I1126 11:27:13.138386 4622 generic.go:334] "Generic (PLEG): container finished" podID="6bd2cbb6-f985-402e-845c-ea4f8e2f970e" containerID="288da523a4df50e70f490a81f781f5ff9e80284482b2b2e02c8f5e38ecdc533b" exitCode=0 Nov 26 11:27:13 crc kubenswrapper[4622]: I1126 11:27:13.138446 4622 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/placement-db-sync-2mgt7" event={"ID":"6bd2cbb6-f985-402e-845c-ea4f8e2f970e","Type":"ContainerDied","Data":"288da523a4df50e70f490a81f781f5ff9e80284482b2b2e02c8f5e38ecdc533b"} Nov 26 11:27:13 crc kubenswrapper[4622]: I1126 11:27:13.152590 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-5b79cbd499-vlhtt" podStartSLOduration=4.152569554 podStartE2EDuration="4.152569554s" podCreationTimestamp="2025-11-26 11:27:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:27:13.148452676 +0000 UTC m=+992.739664198" watchObservedRunningTime="2025-11-26 11:27:13.152569554 +0000 UTC m=+992.743781076" Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.486368 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-2mgt7" Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.686554 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-combined-ca-bundle\") pod \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.686687 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-logs\") pod \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.686845 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-scripts\") pod \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.686893 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-config-data\") pod \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.686915 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r59dr\" (UniqueName: \"kubernetes.io/projected/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-kube-api-access-r59dr\") pod \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\" (UID: \"6bd2cbb6-f985-402e-845c-ea4f8e2f970e\") " Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.687232 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-logs" (OuterVolumeSpecName: "logs") pod "6bd2cbb6-f985-402e-845c-ea4f8e2f970e" (UID: "6bd2cbb6-f985-402e-845c-ea4f8e2f970e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.693626 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-scripts" (OuterVolumeSpecName: "scripts") pod "6bd2cbb6-f985-402e-845c-ea4f8e2f970e" (UID: "6bd2cbb6-f985-402e-845c-ea4f8e2f970e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.693706 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-kube-api-access-r59dr" (OuterVolumeSpecName: "kube-api-access-r59dr") pod "6bd2cbb6-f985-402e-845c-ea4f8e2f970e" (UID: "6bd2cbb6-f985-402e-845c-ea4f8e2f970e"). InnerVolumeSpecName "kube-api-access-r59dr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.706890 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6bd2cbb6-f985-402e-845c-ea4f8e2f970e" (UID: "6bd2cbb6-f985-402e-845c-ea4f8e2f970e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.714103 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-config-data" (OuterVolumeSpecName: "config-data") pod "6bd2cbb6-f985-402e-845c-ea4f8e2f970e" (UID: "6bd2cbb6-f985-402e-845c-ea4f8e2f970e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.790036 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.790080 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.790095 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r59dr\" (UniqueName: \"kubernetes.io/projected/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-kube-api-access-r59dr\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.790106 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:14 crc kubenswrapper[4622]: I1126 11:27:14.790131 4622 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6bd2cbb6-f985-402e-845c-ea4f8e2f970e-logs\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.157473 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-2mgt7" event={"ID":"6bd2cbb6-f985-402e-845c-ea4f8e2f970e","Type":"ContainerDied","Data":"59798216ac77779bde4f61752218b7575ad51a29464a54e88d906b82519641d6"} Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.157548 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59798216ac77779bde4f61752218b7575ad51a29464a54e88d906b82519641d6" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.157813 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-2mgt7" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.621890 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-bc8786d46-z584k"] Nov 26 11:27:15 crc kubenswrapper[4622]: E1126 11:27:15.622674 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01c222b2-900b-401d-8ec2-fb9889927110" containerName="init" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.622693 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="01c222b2-900b-401d-8ec2-fb9889927110" containerName="init" Nov 26 11:27:15 crc kubenswrapper[4622]: E1126 11:27:15.622716 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01c222b2-900b-401d-8ec2-fb9889927110" containerName="dnsmasq-dns" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.622722 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="01c222b2-900b-401d-8ec2-fb9889927110" containerName="dnsmasq-dns" Nov 26 11:27:15 crc kubenswrapper[4622]: E1126 11:27:15.622732 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bd2cbb6-f985-402e-845c-ea4f8e2f970e" containerName="placement-db-sync" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.622741 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bd2cbb6-f985-402e-845c-ea4f8e2f970e" containerName="placement-db-sync" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.623192 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="01c222b2-900b-401d-8ec2-fb9889927110" containerName="dnsmasq-dns" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.623215 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bd2cbb6-f985-402e-845c-ea4f8e2f970e" containerName="placement-db-sync" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.627480 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.629668 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-bc8786d46-z584k"] Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.631989 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.632366 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-tpzsg" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.632707 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.632956 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.635930 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.819473 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-config-data\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.819832 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-scripts\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.819871 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgm6g\" (UniqueName: \"kubernetes.io/projected/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-kube-api-access-cgm6g\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.819905 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-internal-tls-certs\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.819971 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-public-tls-certs\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.820033 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-combined-ca-bundle\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.820313 
4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-logs\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.923080 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-logs\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.923417 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-config-data\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.923455 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-scripts\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.923495 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgm6g\" (UniqueName: \"kubernetes.io/projected/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-kube-api-access-cgm6g\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.923553 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-internal-tls-certs\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.923612 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-public-tls-certs\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.923670 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-combined-ca-bundle\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.926452 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-logs\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.932018 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-config-data\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.937673 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-public-tls-certs\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.938976 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-combined-ca-bundle\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.940934 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgm6g\" (UniqueName: \"kubernetes.io/projected/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-kube-api-access-cgm6g\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.941045 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-scripts\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.944680 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2-internal-tls-certs\") pod \"placement-bc8786d46-z584k\" (UID: \"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2\") " pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:15 crc kubenswrapper[4622]: I1126 11:27:15.966114 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:18 crc kubenswrapper[4622]: I1126 11:27:18.037628 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-686b8b87c4-gqg7z" podUID="85b663a5-e13e-4653-a4bb-340952a968c9" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.140:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.140:8443: connect: connection refused" Nov 26 11:27:18 crc kubenswrapper[4622]: I1126 11:27:18.128407 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-bc6fbfc58-jv7cz" podUID="2285a60b-0202-4abd-91de-7241e109804f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.141:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.141:8443: connect: connection refused" Nov 26 11:27:18 crc kubenswrapper[4622]: I1126 11:27:18.830002 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-bc8786d46-z584k"] Nov 26 11:27:18 crc kubenswrapper[4622]: W1126 11:27:18.842747 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5eaa296b_2eb5_4729_ad5d_1b3bc85e93a2.slice/crio-5928f74cd9f86db5788903bfbefe1d7e598bbbe06a5ce3efd0a4a4e964028cd8 WatchSource:0}: Error finding container 5928f74cd9f86db5788903bfbefe1d7e598bbbe06a5ce3efd0a4a4e964028cd8: Status 404 returned error can't find the container with id 5928f74cd9f86db5788903bfbefe1d7e598bbbe06a5ce3efd0a4a4e964028cd8 Nov 26 11:27:19 crc kubenswrapper[4622]: I1126 11:27:19.201364 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0d0cf662-0c27-42e7-842a-2479b180995f","Type":"ContainerStarted","Data":"5d7067e3496e767f8b18c1fb7a1b3c594f255743094b30ca235808eadfc532a9"} Nov 26 11:27:19 crc kubenswrapper[4622]: I1126 11:27:19.201694 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 26 11:27:19 crc kubenswrapper[4622]: I1126 11:27:19.201514 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="ceilometer-central-agent" containerID="cri-o://7478c478b07c3a92aceadea3f7440c84667ca495b4cf664b452d092fdc7b23d8" gracePeriod=30 Nov 26 11:27:19 crc kubenswrapper[4622]: I1126 11:27:19.201790 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="proxy-httpd" containerID="cri-o://5d7067e3496e767f8b18c1fb7a1b3c594f255743094b30ca235808eadfc532a9" gracePeriod=30 Nov 26 11:27:19 crc kubenswrapper[4622]: I1126 11:27:19.201873 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="ceilometer-notification-agent" containerID="cri-o://7f87a60c75eaeafd945b2d57a8d81e76dd9c64bdbd2821cc8b597f9309849854" gracePeriod=30 Nov 26 11:27:19 crc kubenswrapper[4622]: I1126 11:27:19.201903 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="sg-core" containerID="cri-o://71b363fb3a8e0b288879ed4d09d0007b7f68e4490b8cbf34377593ef1884ce3b" gracePeriod=30 Nov 26 11:27:19 crc kubenswrapper[4622]: I1126 11:27:19.209814 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/placement-bc8786d46-z584k" event={"ID":"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2","Type":"ContainerStarted","Data":"a8414e7f0b3bfda7fc2ca95c32fdf3510f4e5a5946570dc23fba665b8ee94716"} Nov 26 11:27:19 crc kubenswrapper[4622]: I1126 11:27:19.209847 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-bc8786d46-z584k" event={"ID":"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2","Type":"ContainerStarted","Data":"5928f74cd9f86db5788903bfbefe1d7e598bbbe06a5ce3efd0a4a4e964028cd8"} Nov 26 11:27:19 crc kubenswrapper[4622]: I1126 11:27:19.212453 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sq2s5" event={"ID":"57b0b68f-f25e-417e-ae0f-55d2361b8df6","Type":"ContainerStarted","Data":"69c9733686ce870a7e858d8928dab96e5c068832ac1342d109bb4d7b2d83c958"} Nov 26 11:27:19 crc kubenswrapper[4622]: I1126 11:27:19.214832 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-j8fgr" event={"ID":"99de607d-316d-4435-b18a-c6eeb950da19","Type":"ContainerStarted","Data":"f4c76e7d8077f210da0fa65bfad4dc1a3bf17465478bb43b49d6e41e30bcdab2"} Nov 26 11:27:19 crc kubenswrapper[4622]: I1126 11:27:19.225120 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.082999573 podStartE2EDuration="40.225109315s" podCreationTimestamp="2025-11-26 11:26:39 +0000 UTC" firstStartedPulling="2025-11-26 11:26:40.301567396 +0000 UTC m=+959.892778919" lastFinishedPulling="2025-11-26 11:27:18.443677139 +0000 UTC m=+998.034888661" observedRunningTime="2025-11-26 11:27:19.219964336 +0000 UTC m=+998.811175858" watchObservedRunningTime="2025-11-26 11:27:19.225109315 +0000 UTC m=+998.816320837" Nov 26 11:27:19 crc kubenswrapper[4622]: I1126 11:27:19.250094 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-j8fgr" podStartSLOduration=2.197800221 podStartE2EDuration="40.250070021s" podCreationTimestamp="2025-11-26 11:26:39 +0000 UTC" firstStartedPulling="2025-11-26 11:26:40.379831697 +0000 UTC m=+959.971043220" lastFinishedPulling="2025-11-26 11:27:18.432101497 +0000 UTC m=+998.023313020" observedRunningTime="2025-11-26 11:27:19.242335798 +0000 UTC m=+998.833547319" watchObservedRunningTime="2025-11-26 11:27:19.250070021 +0000 UTC m=+998.841281543" Nov 26 11:27:20 crc kubenswrapper[4622]: I1126 11:27:20.224488 4622 generic.go:334] "Generic (PLEG): container finished" podID="99de607d-316d-4435-b18a-c6eeb950da19" containerID="f4c76e7d8077f210da0fa65bfad4dc1a3bf17465478bb43b49d6e41e30bcdab2" exitCode=0 Nov 26 11:27:20 crc kubenswrapper[4622]: I1126 11:27:20.224583 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-j8fgr" event={"ID":"99de607d-316d-4435-b18a-c6eeb950da19","Type":"ContainerDied","Data":"f4c76e7d8077f210da0fa65bfad4dc1a3bf17465478bb43b49d6e41e30bcdab2"} Nov 26 11:27:20 crc kubenswrapper[4622]: I1126 11:27:20.227726 4622 generic.go:334] "Generic (PLEG): container finished" podID="0d0cf662-0c27-42e7-842a-2479b180995f" containerID="5d7067e3496e767f8b18c1fb7a1b3c594f255743094b30ca235808eadfc532a9" exitCode=0 Nov 26 11:27:20 crc kubenswrapper[4622]: I1126 11:27:20.227748 4622 generic.go:334] "Generic (PLEG): container finished" podID="0d0cf662-0c27-42e7-842a-2479b180995f" containerID="71b363fb3a8e0b288879ed4d09d0007b7f68e4490b8cbf34377593ef1884ce3b" exitCode=2 Nov 26 11:27:20 crc kubenswrapper[4622]: I1126 11:27:20.227756 4622 generic.go:334] "Generic (PLEG): container finished" 
podID="0d0cf662-0c27-42e7-842a-2479b180995f" containerID="7478c478b07c3a92aceadea3f7440c84667ca495b4cf664b452d092fdc7b23d8" exitCode=0 Nov 26 11:27:20 crc kubenswrapper[4622]: I1126 11:27:20.227757 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0d0cf662-0c27-42e7-842a-2479b180995f","Type":"ContainerDied","Data":"5d7067e3496e767f8b18c1fb7a1b3c594f255743094b30ca235808eadfc532a9"} Nov 26 11:27:20 crc kubenswrapper[4622]: I1126 11:27:20.227799 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0d0cf662-0c27-42e7-842a-2479b180995f","Type":"ContainerDied","Data":"71b363fb3a8e0b288879ed4d09d0007b7f68e4490b8cbf34377593ef1884ce3b"} Nov 26 11:27:20 crc kubenswrapper[4622]: I1126 11:27:20.227810 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0d0cf662-0c27-42e7-842a-2479b180995f","Type":"ContainerDied","Data":"7478c478b07c3a92aceadea3f7440c84667ca495b4cf664b452d092fdc7b23d8"} Nov 26 11:27:20 crc kubenswrapper[4622]: I1126 11:27:20.229711 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-bc8786d46-z584k" event={"ID":"5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2","Type":"ContainerStarted","Data":"a52de1bba01cf04e3ae759c08fb60aa59375a5b2eba62263251612f54bfca76a"} Nov 26 11:27:20 crc kubenswrapper[4622]: I1126 11:27:20.229875 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:20 crc kubenswrapper[4622]: I1126 11:27:20.239250 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-sq2s5" podStartSLOduration=2.8591107879999997 podStartE2EDuration="41.239231956s" podCreationTimestamp="2025-11-26 11:26:39 +0000 UTC" firstStartedPulling="2025-11-26 11:26:40.051486387 +0000 UTC m=+959.642697910" lastFinishedPulling="2025-11-26 11:27:18.431607555 +0000 UTC m=+998.022819078" observedRunningTime="2025-11-26 11:27:19.273909141 +0000 UTC m=+998.865120664" watchObservedRunningTime="2025-11-26 11:27:20.239231956 +0000 UTC m=+999.830443479" Nov 26 11:27:20 crc kubenswrapper[4622]: I1126 11:27:20.261766 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-bc8786d46-z584k" podStartSLOduration=5.261749844 podStartE2EDuration="5.261749844s" podCreationTimestamp="2025-11-26 11:27:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:27:20.254989659 +0000 UTC m=+999.846201180" watchObservedRunningTime="2025-11-26 11:27:20.261749844 +0000 UTC m=+999.852961366" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.158190 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.237854 4622 generic.go:334] "Generic (PLEG): container finished" podID="0d0cf662-0c27-42e7-842a-2479b180995f" containerID="7f87a60c75eaeafd945b2d57a8d81e76dd9c64bdbd2821cc8b597f9309849854" exitCode=0 Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.237929 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.237973 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0d0cf662-0c27-42e7-842a-2479b180995f","Type":"ContainerDied","Data":"7f87a60c75eaeafd945b2d57a8d81e76dd9c64bdbd2821cc8b597f9309849854"} Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.238027 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0d0cf662-0c27-42e7-842a-2479b180995f","Type":"ContainerDied","Data":"eec5e58d1022ab50d72f5b49babb308768da57ad007939eab3dd7499231efe07"} Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.238048 4622 scope.go:117] "RemoveContainer" containerID="5d7067e3496e767f8b18c1fb7a1b3c594f255743094b30ca235808eadfc532a9" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.238697 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.244441 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-sg-core-conf-yaml\") pod \"0d0cf662-0c27-42e7-842a-2479b180995f\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.244529 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-combined-ca-bundle\") pod \"0d0cf662-0c27-42e7-842a-2479b180995f\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.244557 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0d0cf662-0c27-42e7-842a-2479b180995f-log-httpd\") pod \"0d0cf662-0c27-42e7-842a-2479b180995f\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.244631 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-config-data\") pod \"0d0cf662-0c27-42e7-842a-2479b180995f\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.244676 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-scripts\") pod \"0d0cf662-0c27-42e7-842a-2479b180995f\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.244716 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0d0cf662-0c27-42e7-842a-2479b180995f-run-httpd\") pod \"0d0cf662-0c27-42e7-842a-2479b180995f\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.244760 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlvzs\" (UniqueName: \"kubernetes.io/projected/0d0cf662-0c27-42e7-842a-2479b180995f-kube-api-access-wlvzs\") pod \"0d0cf662-0c27-42e7-842a-2479b180995f\" (UID: \"0d0cf662-0c27-42e7-842a-2479b180995f\") " Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.245689 4622 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d0cf662-0c27-42e7-842a-2479b180995f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0d0cf662-0c27-42e7-842a-2479b180995f" (UID: "0d0cf662-0c27-42e7-842a-2479b180995f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.245833 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d0cf662-0c27-42e7-842a-2479b180995f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0d0cf662-0c27-42e7-842a-2479b180995f" (UID: "0d0cf662-0c27-42e7-842a-2479b180995f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.256423 4622 scope.go:117] "RemoveContainer" containerID="71b363fb3a8e0b288879ed4d09d0007b7f68e4490b8cbf34377593ef1884ce3b" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.264253 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-scripts" (OuterVolumeSpecName: "scripts") pod "0d0cf662-0c27-42e7-842a-2479b180995f" (UID: "0d0cf662-0c27-42e7-842a-2479b180995f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.267590 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0d0cf662-0c27-42e7-842a-2479b180995f" (UID: "0d0cf662-0c27-42e7-842a-2479b180995f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.269985 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d0cf662-0c27-42e7-842a-2479b180995f-kube-api-access-wlvzs" (OuterVolumeSpecName: "kube-api-access-wlvzs") pod "0d0cf662-0c27-42e7-842a-2479b180995f" (UID: "0d0cf662-0c27-42e7-842a-2479b180995f"). InnerVolumeSpecName "kube-api-access-wlvzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.316470 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d0cf662-0c27-42e7-842a-2479b180995f" (UID: "0d0cf662-0c27-42e7-842a-2479b180995f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.336032 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-config-data" (OuterVolumeSpecName: "config-data") pod "0d0cf662-0c27-42e7-842a-2479b180995f" (UID: "0d0cf662-0c27-42e7-842a-2479b180995f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.347182 4622 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.347213 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.347224 4622 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0d0cf662-0c27-42e7-842a-2479b180995f-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.347234 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.347242 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d0cf662-0c27-42e7-842a-2479b180995f-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.347251 4622 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0d0cf662-0c27-42e7-842a-2479b180995f-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.347261 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlvzs\" (UniqueName: \"kubernetes.io/projected/0d0cf662-0c27-42e7-842a-2479b180995f-kube-api-access-wlvzs\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.381528 4622 scope.go:117] "RemoveContainer" containerID="7f87a60c75eaeafd945b2d57a8d81e76dd9c64bdbd2821cc8b597f9309849854" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.399729 4622 scope.go:117] "RemoveContainer" containerID="7478c478b07c3a92aceadea3f7440c84667ca495b4cf664b452d092fdc7b23d8" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.414384 4622 scope.go:117] "RemoveContainer" containerID="5d7067e3496e767f8b18c1fb7a1b3c594f255743094b30ca235808eadfc532a9" Nov 26 11:27:21 crc kubenswrapper[4622]: E1126 11:27:21.414840 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d7067e3496e767f8b18c1fb7a1b3c594f255743094b30ca235808eadfc532a9\": container with ID starting with 5d7067e3496e767f8b18c1fb7a1b3c594f255743094b30ca235808eadfc532a9 not found: ID does not exist" containerID="5d7067e3496e767f8b18c1fb7a1b3c594f255743094b30ca235808eadfc532a9" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.414885 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d7067e3496e767f8b18c1fb7a1b3c594f255743094b30ca235808eadfc532a9"} err="failed to get container status \"5d7067e3496e767f8b18c1fb7a1b3c594f255743094b30ca235808eadfc532a9\": rpc error: code = NotFound desc = could not find container \"5d7067e3496e767f8b18c1fb7a1b3c594f255743094b30ca235808eadfc532a9\": container with ID starting with 5d7067e3496e767f8b18c1fb7a1b3c594f255743094b30ca235808eadfc532a9 not found: ID does not exist" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 
11:27:21.414915 4622 scope.go:117] "RemoveContainer" containerID="71b363fb3a8e0b288879ed4d09d0007b7f68e4490b8cbf34377593ef1884ce3b" Nov 26 11:27:21 crc kubenswrapper[4622]: E1126 11:27:21.415220 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71b363fb3a8e0b288879ed4d09d0007b7f68e4490b8cbf34377593ef1884ce3b\": container with ID starting with 71b363fb3a8e0b288879ed4d09d0007b7f68e4490b8cbf34377593ef1884ce3b not found: ID does not exist" containerID="71b363fb3a8e0b288879ed4d09d0007b7f68e4490b8cbf34377593ef1884ce3b" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.415255 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71b363fb3a8e0b288879ed4d09d0007b7f68e4490b8cbf34377593ef1884ce3b"} err="failed to get container status \"71b363fb3a8e0b288879ed4d09d0007b7f68e4490b8cbf34377593ef1884ce3b\": rpc error: code = NotFound desc = could not find container \"71b363fb3a8e0b288879ed4d09d0007b7f68e4490b8cbf34377593ef1884ce3b\": container with ID starting with 71b363fb3a8e0b288879ed4d09d0007b7f68e4490b8cbf34377593ef1884ce3b not found: ID does not exist" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.415277 4622 scope.go:117] "RemoveContainer" containerID="7f87a60c75eaeafd945b2d57a8d81e76dd9c64bdbd2821cc8b597f9309849854" Nov 26 11:27:21 crc kubenswrapper[4622]: E1126 11:27:21.415537 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f87a60c75eaeafd945b2d57a8d81e76dd9c64bdbd2821cc8b597f9309849854\": container with ID starting with 7f87a60c75eaeafd945b2d57a8d81e76dd9c64bdbd2821cc8b597f9309849854 not found: ID does not exist" containerID="7f87a60c75eaeafd945b2d57a8d81e76dd9c64bdbd2821cc8b597f9309849854" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.415562 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f87a60c75eaeafd945b2d57a8d81e76dd9c64bdbd2821cc8b597f9309849854"} err="failed to get container status \"7f87a60c75eaeafd945b2d57a8d81e76dd9c64bdbd2821cc8b597f9309849854\": rpc error: code = NotFound desc = could not find container \"7f87a60c75eaeafd945b2d57a8d81e76dd9c64bdbd2821cc8b597f9309849854\": container with ID starting with 7f87a60c75eaeafd945b2d57a8d81e76dd9c64bdbd2821cc8b597f9309849854 not found: ID does not exist" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.415577 4622 scope.go:117] "RemoveContainer" containerID="7478c478b07c3a92aceadea3f7440c84667ca495b4cf664b452d092fdc7b23d8" Nov 26 11:27:21 crc kubenswrapper[4622]: E1126 11:27:21.415848 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7478c478b07c3a92aceadea3f7440c84667ca495b4cf664b452d092fdc7b23d8\": container with ID starting with 7478c478b07c3a92aceadea3f7440c84667ca495b4cf664b452d092fdc7b23d8 not found: ID does not exist" containerID="7478c478b07c3a92aceadea3f7440c84667ca495b4cf664b452d092fdc7b23d8" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.415873 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7478c478b07c3a92aceadea3f7440c84667ca495b4cf664b452d092fdc7b23d8"} err="failed to get container status \"7478c478b07c3a92aceadea3f7440c84667ca495b4cf664b452d092fdc7b23d8\": rpc error: code = NotFound desc = could not find container \"7478c478b07c3a92aceadea3f7440c84667ca495b4cf664b452d092fdc7b23d8\": container with ID 
starting with 7478c478b07c3a92aceadea3f7440c84667ca495b4cf664b452d092fdc7b23d8 not found: ID does not exist" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.441028 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-j8fgr" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.549574 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mdln\" (UniqueName: \"kubernetes.io/projected/99de607d-316d-4435-b18a-c6eeb950da19-kube-api-access-6mdln\") pod \"99de607d-316d-4435-b18a-c6eeb950da19\" (UID: \"99de607d-316d-4435-b18a-c6eeb950da19\") " Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.549852 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/99de607d-316d-4435-b18a-c6eeb950da19-db-sync-config-data\") pod \"99de607d-316d-4435-b18a-c6eeb950da19\" (UID: \"99de607d-316d-4435-b18a-c6eeb950da19\") " Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.550040 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99de607d-316d-4435-b18a-c6eeb950da19-combined-ca-bundle\") pod \"99de607d-316d-4435-b18a-c6eeb950da19\" (UID: \"99de607d-316d-4435-b18a-c6eeb950da19\") " Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.553903 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99de607d-316d-4435-b18a-c6eeb950da19-kube-api-access-6mdln" (OuterVolumeSpecName: "kube-api-access-6mdln") pod "99de607d-316d-4435-b18a-c6eeb950da19" (UID: "99de607d-316d-4435-b18a-c6eeb950da19"). InnerVolumeSpecName "kube-api-access-6mdln". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.554946 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99de607d-316d-4435-b18a-c6eeb950da19-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "99de607d-316d-4435-b18a-c6eeb950da19" (UID: "99de607d-316d-4435-b18a-c6eeb950da19"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.571302 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.577161 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.579111 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99de607d-316d-4435-b18a-c6eeb950da19-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "99de607d-316d-4435-b18a-c6eeb950da19" (UID: "99de607d-316d-4435-b18a-c6eeb950da19"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.584131 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:21 crc kubenswrapper[4622]: E1126 11:27:21.584441 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="proxy-httpd" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.584458 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="proxy-httpd" Nov 26 11:27:21 crc kubenswrapper[4622]: E1126 11:27:21.584470 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="ceilometer-notification-agent" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.584477 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="ceilometer-notification-agent" Nov 26 11:27:21 crc kubenswrapper[4622]: E1126 11:27:21.584491 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="sg-core" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.584512 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="sg-core" Nov 26 11:27:21 crc kubenswrapper[4622]: E1126 11:27:21.584522 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="ceilometer-central-agent" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.584527 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="ceilometer-central-agent" Nov 26 11:27:21 crc kubenswrapper[4622]: E1126 11:27:21.584549 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99de607d-316d-4435-b18a-c6eeb950da19" containerName="barbican-db-sync" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.584557 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="99de607d-316d-4435-b18a-c6eeb950da19" containerName="barbican-db-sync" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.584698 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="sg-core" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.584714 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="ceilometer-central-agent" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.584729 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="99de607d-316d-4435-b18a-c6eeb950da19" containerName="barbican-db-sync" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.584739 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="proxy-httpd" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.584750 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" containerName="ceilometer-notification-agent" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.586057 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.587581 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.587864 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.594072 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.651806 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.651869 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.652051 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e996678-023a-45a2-b44e-a334186d0e2e-log-httpd\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.652230 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-config-data\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.652334 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-scripts\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.652389 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgps2\" (UniqueName: \"kubernetes.io/projected/6e996678-023a-45a2-b44e-a334186d0e2e-kube-api-access-zgps2\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.652481 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e996678-023a-45a2-b44e-a334186d0e2e-run-httpd\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.652622 4622 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/99de607d-316d-4435-b18a-c6eeb950da19-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.652649 4622 reconciler_common.go:293] "Volume detached for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99de607d-316d-4435-b18a-c6eeb950da19-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.652661 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mdln\" (UniqueName: \"kubernetes.io/projected/99de607d-316d-4435-b18a-c6eeb950da19-kube-api-access-6mdln\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.754377 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.754454 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.754534 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e996678-023a-45a2-b44e-a334186d0e2e-log-httpd\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.755072 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e996678-023a-45a2-b44e-a334186d0e2e-log-httpd\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.755386 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-config-data\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.755470 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-scripts\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.755559 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgps2\" (UniqueName: \"kubernetes.io/projected/6e996678-023a-45a2-b44e-a334186d0e2e-kube-api-access-zgps2\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.755595 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e996678-023a-45a2-b44e-a334186d0e2e-run-httpd\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.755882 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e996678-023a-45a2-b44e-a334186d0e2e-run-httpd\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") 
" pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.757249 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.757646 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.758134 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-config-data\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.758649 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-scripts\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.769881 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgps2\" (UniqueName: \"kubernetes.io/projected/6e996678-023a-45a2-b44e-a334186d0e2e-kube-api-access-zgps2\") pod \"ceilometer-0\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " pod="openstack/ceilometer-0" Nov 26 11:27:21 crc kubenswrapper[4622]: I1126 11:27:21.898806 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.249539 4622 generic.go:334] "Generic (PLEG): container finished" podID="57b0b68f-f25e-417e-ae0f-55d2361b8df6" containerID="69c9733686ce870a7e858d8928dab96e5c068832ac1342d109bb4d7b2d83c958" exitCode=0 Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.249619 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sq2s5" event={"ID":"57b0b68f-f25e-417e-ae0f-55d2361b8df6","Type":"ContainerDied","Data":"69c9733686ce870a7e858d8928dab96e5c068832ac1342d109bb4d7b2d83c958"} Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.251671 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-j8fgr" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.251713 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-j8fgr" event={"ID":"99de607d-316d-4435-b18a-c6eeb950da19","Type":"ContainerDied","Data":"ba9f1c063a9a636f1389edfe16a5dd4506a2cc8c7d8afb8b711c81b48860e764"} Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.251738 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba9f1c063a9a636f1389edfe16a5dd4506a2cc8c7d8afb8b711c81b48860e764" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.282026 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.580136 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-755d984c7c-rw8ld"] Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.584193 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.587326 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-s9gjm" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.587611 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 26 11:27:22 crc kubenswrapper[4622]: W1126 11:27:22.587679 4622 reflector.go:561] object-"openstack"/"barbican-worker-config-data": failed to list *v1.Secret: secrets "barbican-worker-config-data" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Nov 26 11:27:22 crc kubenswrapper[4622]: E1126 11:27:22.587720 4622 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"barbican-worker-config-data\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"barbican-worker-config-data\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.594323 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-755d984c7c-rw8ld"] Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.645123 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7445bd57d8-4bsbs"] Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.646751 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.653979 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.666444 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-844b557b9c-rld8l"] Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.668096 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.675905 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7445bd57d8-4bsbs"] Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.681258 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0174809-96a2-416e-82b9-9519d71a81d2-logs\") pod \"barbican-keystone-listener-7445bd57d8-4bsbs\" (UID: \"e0174809-96a2-416e-82b9-9519d71a81d2\") " pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.681310 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0174809-96a2-416e-82b9-9519d71a81d2-combined-ca-bundle\") pod \"barbican-keystone-listener-7445bd57d8-4bsbs\" (UID: \"e0174809-96a2-416e-82b9-9519d71a81d2\") " pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.681353 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8523271e-6368-4a2a-b81e-fd513c441cb6-config-data\") pod \"barbican-worker-755d984c7c-rw8ld\" (UID: \"8523271e-6368-4a2a-b81e-fd513c441cb6\") " pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.681442 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8523271e-6368-4a2a-b81e-fd513c441cb6-config-data-custom\") pod \"barbican-worker-755d984c7c-rw8ld\" (UID: \"8523271e-6368-4a2a-b81e-fd513c441cb6\") " pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.681474 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8523271e-6368-4a2a-b81e-fd513c441cb6-combined-ca-bundle\") pod \"barbican-worker-755d984c7c-rw8ld\" (UID: \"8523271e-6368-4a2a-b81e-fd513c441cb6\") " pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.681512 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0174809-96a2-416e-82b9-9519d71a81d2-config-data\") pod \"barbican-keystone-listener-7445bd57d8-4bsbs\" (UID: \"e0174809-96a2-416e-82b9-9519d71a81d2\") " pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.681540 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ws594\" (UniqueName: \"kubernetes.io/projected/8523271e-6368-4a2a-b81e-fd513c441cb6-kube-api-access-ws594\") pod \"barbican-worker-755d984c7c-rw8ld\" (UID: \"8523271e-6368-4a2a-b81e-fd513c441cb6\") " pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.681608 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8523271e-6368-4a2a-b81e-fd513c441cb6-logs\") pod \"barbican-worker-755d984c7c-rw8ld\" (UID: 
\"8523271e-6368-4a2a-b81e-fd513c441cb6\") " pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.681667 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ttwg\" (UniqueName: \"kubernetes.io/projected/e0174809-96a2-416e-82b9-9519d71a81d2-kube-api-access-2ttwg\") pod \"barbican-keystone-listener-7445bd57d8-4bsbs\" (UID: \"e0174809-96a2-416e-82b9-9519d71a81d2\") " pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.681729 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0174809-96a2-416e-82b9-9519d71a81d2-config-data-custom\") pod \"barbican-keystone-listener-7445bd57d8-4bsbs\" (UID: \"e0174809-96a2-416e-82b9-9519d71a81d2\") " pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.691148 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-844b557b9c-rld8l"] Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.722439 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d0cf662-0c27-42e7-842a-2479b180995f" path="/var/lib/kubelet/pods/0d0cf662-0c27-42e7-842a-2479b180995f/volumes" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.783929 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0174809-96a2-416e-82b9-9519d71a81d2-logs\") pod \"barbican-keystone-listener-7445bd57d8-4bsbs\" (UID: \"e0174809-96a2-416e-82b9-9519d71a81d2\") " pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.783997 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0174809-96a2-416e-82b9-9519d71a81d2-combined-ca-bundle\") pod \"barbican-keystone-listener-7445bd57d8-4bsbs\" (UID: \"e0174809-96a2-416e-82b9-9519d71a81d2\") " pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.784042 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8523271e-6368-4a2a-b81e-fd513c441cb6-config-data\") pod \"barbican-worker-755d984c7c-rw8ld\" (UID: \"8523271e-6368-4a2a-b81e-fd513c441cb6\") " pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.784137 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8523271e-6368-4a2a-b81e-fd513c441cb6-config-data-custom\") pod \"barbican-worker-755d984c7c-rw8ld\" (UID: \"8523271e-6368-4a2a-b81e-fd513c441cb6\") " pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.784186 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8523271e-6368-4a2a-b81e-fd513c441cb6-combined-ca-bundle\") pod \"barbican-worker-755d984c7c-rw8ld\" (UID: \"8523271e-6368-4a2a-b81e-fd513c441cb6\") " pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.784220 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0174809-96a2-416e-82b9-9519d71a81d2-config-data\") pod \"barbican-keystone-listener-7445bd57d8-4bsbs\" (UID: \"e0174809-96a2-416e-82b9-9519d71a81d2\") " pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.784250 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ws594\" (UniqueName: \"kubernetes.io/projected/8523271e-6368-4a2a-b81e-fd513c441cb6-kube-api-access-ws594\") pod \"barbican-worker-755d984c7c-rw8ld\" (UID: \"8523271e-6368-4a2a-b81e-fd513c441cb6\") " pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.784303 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-ovsdbserver-nb\") pod \"dnsmasq-dns-844b557b9c-rld8l\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.784384 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8523271e-6368-4a2a-b81e-fd513c441cb6-logs\") pod \"barbican-worker-755d984c7c-rw8ld\" (UID: \"8523271e-6368-4a2a-b81e-fd513c441cb6\") " pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.784486 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0174809-96a2-416e-82b9-9519d71a81d2-logs\") pod \"barbican-keystone-listener-7445bd57d8-4bsbs\" (UID: \"e0174809-96a2-416e-82b9-9519d71a81d2\") " pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.789199 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8523271e-6368-4a2a-b81e-fd513c441cb6-logs\") pod \"barbican-worker-755d984c7c-rw8ld\" (UID: \"8523271e-6368-4a2a-b81e-fd513c441cb6\") " pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.789370 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-config\") pod \"dnsmasq-dns-844b557b9c-rld8l\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.789480 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ttwg\" (UniqueName: \"kubernetes.io/projected/e0174809-96a2-416e-82b9-9519d71a81d2-kube-api-access-2ttwg\") pod \"barbican-keystone-listener-7445bd57d8-4bsbs\" (UID: \"e0174809-96a2-416e-82b9-9519d71a81d2\") " pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.789650 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbzdq\" (UniqueName: \"kubernetes.io/projected/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-kube-api-access-jbzdq\") pod \"dnsmasq-dns-844b557b9c-rld8l\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.789760 4622 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0174809-96a2-416e-82b9-9519d71a81d2-config-data-custom\") pod \"barbican-keystone-listener-7445bd57d8-4bsbs\" (UID: \"e0174809-96a2-416e-82b9-9519d71a81d2\") " pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.789832 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-dns-svc\") pod \"dnsmasq-dns-844b557b9c-rld8l\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.789947 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-ovsdbserver-sb\") pod \"dnsmasq-dns-844b557b9c-rld8l\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.789836 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0174809-96a2-416e-82b9-9519d71a81d2-config-data\") pod \"barbican-keystone-listener-7445bd57d8-4bsbs\" (UID: \"e0174809-96a2-416e-82b9-9519d71a81d2\") " pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.790617 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8523271e-6368-4a2a-b81e-fd513c441cb6-combined-ca-bundle\") pod \"barbican-worker-755d984c7c-rw8ld\" (UID: \"8523271e-6368-4a2a-b81e-fd513c441cb6\") " pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.792541 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8523271e-6368-4a2a-b81e-fd513c441cb6-config-data\") pod \"barbican-worker-755d984c7c-rw8ld\" (UID: \"8523271e-6368-4a2a-b81e-fd513c441cb6\") " pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.793958 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0174809-96a2-416e-82b9-9519d71a81d2-config-data-custom\") pod \"barbican-keystone-listener-7445bd57d8-4bsbs\" (UID: \"e0174809-96a2-416e-82b9-9519d71a81d2\") " pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.801411 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0174809-96a2-416e-82b9-9519d71a81d2-combined-ca-bundle\") pod \"barbican-keystone-listener-7445bd57d8-4bsbs\" (UID: \"e0174809-96a2-416e-82b9-9519d71a81d2\") " pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.804998 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ws594\" (UniqueName: \"kubernetes.io/projected/8523271e-6368-4a2a-b81e-fd513c441cb6-kube-api-access-ws594\") pod \"barbican-worker-755d984c7c-rw8ld\" (UID: \"8523271e-6368-4a2a-b81e-fd513c441cb6\") " 
pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.809900 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ttwg\" (UniqueName: \"kubernetes.io/projected/e0174809-96a2-416e-82b9-9519d71a81d2-kube-api-access-2ttwg\") pod \"barbican-keystone-listener-7445bd57d8-4bsbs\" (UID: \"e0174809-96a2-416e-82b9-9519d71a81d2\") " pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.809931 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-649b8ddf5b-rsqxp"] Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.811395 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.813759 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.818367 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-649b8ddf5b-rsqxp"] Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.891565 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-ovsdbserver-nb\") pod \"dnsmasq-dns-844b557b9c-rld8l\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.891665 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-logs\") pod \"barbican-api-649b8ddf5b-rsqxp\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.891713 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-config\") pod \"dnsmasq-dns-844b557b9c-rld8l\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.891740 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srjrl\" (UniqueName: \"kubernetes.io/projected/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-kube-api-access-srjrl\") pod \"barbican-api-649b8ddf5b-rsqxp\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.891780 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbzdq\" (UniqueName: \"kubernetes.io/projected/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-kube-api-access-jbzdq\") pod \"dnsmasq-dns-844b557b9c-rld8l\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.891819 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-dns-svc\") pod \"dnsmasq-dns-844b557b9c-rld8l\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: 
I1126 11:27:22.891852 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-ovsdbserver-sb\") pod \"dnsmasq-dns-844b557b9c-rld8l\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.891949 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-config-data-custom\") pod \"barbican-api-649b8ddf5b-rsqxp\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.892052 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-combined-ca-bundle\") pod \"barbican-api-649b8ddf5b-rsqxp\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.892184 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-config-data\") pod \"barbican-api-649b8ddf5b-rsqxp\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.892511 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-ovsdbserver-nb\") pod \"dnsmasq-dns-844b557b9c-rld8l\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.892518 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-config\") pod \"dnsmasq-dns-844b557b9c-rld8l\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.892688 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-ovsdbserver-sb\") pod \"dnsmasq-dns-844b557b9c-rld8l\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.892853 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-dns-svc\") pod \"dnsmasq-dns-844b557b9c-rld8l\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.909799 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbzdq\" (UniqueName: \"kubernetes.io/projected/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-kube-api-access-jbzdq\") pod \"dnsmasq-dns-844b557b9c-rld8l\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.978759 4622 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.991437 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.994361 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-logs\") pod \"barbican-api-649b8ddf5b-rsqxp\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.994435 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srjrl\" (UniqueName: \"kubernetes.io/projected/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-kube-api-access-srjrl\") pod \"barbican-api-649b8ddf5b-rsqxp\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.994630 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-config-data-custom\") pod \"barbican-api-649b8ddf5b-rsqxp\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.994693 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-combined-ca-bundle\") pod \"barbican-api-649b8ddf5b-rsqxp\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.994748 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-config-data\") pod \"barbican-api-649b8ddf5b-rsqxp\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.994801 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-logs\") pod \"barbican-api-649b8ddf5b-rsqxp\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.998379 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-config-data-custom\") pod \"barbican-api-649b8ddf5b-rsqxp\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.999001 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-combined-ca-bundle\") pod \"barbican-api-649b8ddf5b-rsqxp\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:22 crc kubenswrapper[4622]: I1126 11:27:22.999451 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-config-data\") pod \"barbican-api-649b8ddf5b-rsqxp\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.010376 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srjrl\" (UniqueName: \"kubernetes.io/projected/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-kube-api-access-srjrl\") pod \"barbican-api-649b8ddf5b-rsqxp\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.170209 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.267713 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e996678-023a-45a2-b44e-a334186d0e2e","Type":"ContainerStarted","Data":"530e717935b95a142cf8f6e8aab4130e0c602ddd86d3688154bc5b66083f9be5"} Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.426550 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7445bd57d8-4bsbs"] Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.485163 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-844b557b9c-rld8l"] Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.519486 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-649b8ddf5b-rsqxp"] Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.601988 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.607279 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8523271e-6368-4a2a-b81e-fd513c441cb6-config-data-custom\") pod \"barbican-worker-755d984c7c-rw8ld\" (UID: \"8523271e-6368-4a2a-b81e-fd513c441cb6\") " pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.660108 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.717704 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-combined-ca-bundle\") pod \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.718014 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/57b0b68f-f25e-417e-ae0f-55d2361b8df6-etc-machine-id\") pod \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.718148 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-config-data\") pod \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.718196 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsct9\" (UniqueName: \"kubernetes.io/projected/57b0b68f-f25e-417e-ae0f-55d2361b8df6-kube-api-access-vsct9\") pod \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.718216 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-scripts\") pod \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.718249 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-db-sync-config-data\") pod \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\" (UID: \"57b0b68f-f25e-417e-ae0f-55d2361b8df6\") " Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.718149 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57b0b68f-f25e-417e-ae0f-55d2361b8df6-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "57b0b68f-f25e-417e-ae0f-55d2361b8df6" (UID: "57b0b68f-f25e-417e-ae0f-55d2361b8df6"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.719305 4622 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/57b0b68f-f25e-417e-ae0f-55d2361b8df6-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.721285 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57b0b68f-f25e-417e-ae0f-55d2361b8df6-kube-api-access-vsct9" (OuterVolumeSpecName: "kube-api-access-vsct9") pod "57b0b68f-f25e-417e-ae0f-55d2361b8df6" (UID: "57b0b68f-f25e-417e-ae0f-55d2361b8df6"). InnerVolumeSpecName "kube-api-access-vsct9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.721575 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-scripts" (OuterVolumeSpecName: "scripts") pod "57b0b68f-f25e-417e-ae0f-55d2361b8df6" (UID: "57b0b68f-f25e-417e-ae0f-55d2361b8df6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.722431 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "57b0b68f-f25e-417e-ae0f-55d2361b8df6" (UID: "57b0b68f-f25e-417e-ae0f-55d2361b8df6"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.742346 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57b0b68f-f25e-417e-ae0f-55d2361b8df6" (UID: "57b0b68f-f25e-417e-ae0f-55d2361b8df6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.759664 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-config-data" (OuterVolumeSpecName: "config-data") pod "57b0b68f-f25e-417e-ae0f-55d2361b8df6" (UID: "57b0b68f-f25e-417e-ae0f-55d2361b8df6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.802993 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-755d984c7c-rw8ld" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.821107 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.821151 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.821167 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsct9\" (UniqueName: \"kubernetes.io/projected/57b0b68f-f25e-417e-ae0f-55d2361b8df6-kube-api-access-vsct9\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.821191 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:23 crc kubenswrapper[4622]: I1126 11:27:23.821205 4622 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/57b0b68f-f25e-417e-ae0f-55d2361b8df6-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.203346 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-755d984c7c-rw8ld"] Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.278040 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e996678-023a-45a2-b44e-a334186d0e2e","Type":"ContainerStarted","Data":"8439cda5af9da9540d1a8b72894a00c826f2ff0a44ec5e884645dd2a3a2680c3"} Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.278106 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e996678-023a-45a2-b44e-a334186d0e2e","Type":"ContainerStarted","Data":"32d7184bbad1b351dbdb44ca43874ea27f710aa23f520da7343b6dc77a7f558e"} Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.281245 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sq2s5" event={"ID":"57b0b68f-f25e-417e-ae0f-55d2361b8df6","Type":"ContainerDied","Data":"72a08c3ebfbd0a9e6916694b857a013908a2ee3734415f41f42fe4096374db4a"} Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.281327 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72a08c3ebfbd0a9e6916694b857a013908a2ee3734415f41f42fe4096374db4a" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.281266 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-sq2s5" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.282737 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-755d984c7c-rw8ld" event={"ID":"8523271e-6368-4a2a-b81e-fd513c441cb6","Type":"ContainerStarted","Data":"5c8f9faeac0b8c6d4a1e393861821a011ced6a815218ccda28ed9254a116c1ea"} Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.284663 4622 generic.go:334] "Generic (PLEG): container finished" podID="fb01c7fc-cb10-4308-a1bc-cd3c7873c383" containerID="9c35e2689cc301f640100edb5a576253942304aad3c66194c904920b79d99f56" exitCode=0 Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.284801 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844b557b9c-rld8l" event={"ID":"fb01c7fc-cb10-4308-a1bc-cd3c7873c383","Type":"ContainerDied","Data":"9c35e2689cc301f640100edb5a576253942304aad3c66194c904920b79d99f56"} Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.284845 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844b557b9c-rld8l" event={"ID":"fb01c7fc-cb10-4308-a1bc-cd3c7873c383","Type":"ContainerStarted","Data":"0ef2e758c12c0df261bd124d3d15a7e0183785592edb4f4bc21991819ca95199"} Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.288208 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-649b8ddf5b-rsqxp" event={"ID":"20acddaa-a33d-4cb3-9a25-cc0f88aafca7","Type":"ContainerStarted","Data":"77acbfee0613c78486fd98aab47cfb51bf09e3fa2e8285df63c81f27cfc81657"} Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.288262 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-649b8ddf5b-rsqxp" event={"ID":"20acddaa-a33d-4cb3-9a25-cc0f88aafca7","Type":"ContainerStarted","Data":"bb0743b5a8fa9fdf0bd1a26da13b5bddb94d8f06cf3d722dde8cd4eb9053107e"} Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.288277 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-649b8ddf5b-rsqxp" event={"ID":"20acddaa-a33d-4cb3-9a25-cc0f88aafca7","Type":"ContainerStarted","Data":"50e27f65b451f1e2df4952c48edc1a35dc594ff956b5b0563657dbcf8cb01578"} Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.288453 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.288519 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.293745 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" event={"ID":"e0174809-96a2-416e-82b9-9519d71a81d2","Type":"ContainerStarted","Data":"2212317e03c7317aa45da3a2c7d40ccbd71f7d7b26e67eb39b89780e45678452"} Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.344986 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-649b8ddf5b-rsqxp" podStartSLOduration=2.344965658 podStartE2EDuration="2.344965658s" podCreationTimestamp="2025-11-26 11:27:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:27:24.338848456 +0000 UTC m=+1003.930059988" watchObservedRunningTime="2025-11-26 11:27:24.344965658 +0000 UTC m=+1003.936177180" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.479079 4622 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 26 11:27:24 crc kubenswrapper[4622]: E1126 11:27:24.479711 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57b0b68f-f25e-417e-ae0f-55d2361b8df6" containerName="cinder-db-sync" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.479727 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="57b0b68f-f25e-417e-ae0f-55d2361b8df6" containerName="cinder-db-sync" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.479952 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="57b0b68f-f25e-417e-ae0f-55d2361b8df6" containerName="cinder-db-sync" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.480825 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.486420 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.487402 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.487713 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.488064 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-grv44" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.500890 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.526350 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-844b557b9c-rld8l"] Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.545118 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-775457b975-fbhtg"] Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.545483 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-scripts\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.545596 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.545687 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/52301b8d-a03d-4a3e-af50-bc7d80f9b925-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.545711 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-config-data\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc 
kubenswrapper[4622]: I1126 11:27:24.545754 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.546083 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p52k4\" (UniqueName: \"kubernetes.io/projected/52301b8d-a03d-4a3e-af50-bc7d80f9b925-kube-api-access-p52k4\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.548065 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.555689 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-775457b975-fbhtg"] Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.647901 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/52301b8d-a03d-4a3e-af50-bc7d80f9b925-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.647938 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-config-data\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.648000 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-dns-svc\") pod \"dnsmasq-dns-775457b975-fbhtg\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.648034 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.648204 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-ovsdbserver-nb\") pod \"dnsmasq-dns-775457b975-fbhtg\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.648256 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nd7mx\" (UniqueName: \"kubernetes.io/projected/c506ee05-4c61-48c9-a61d-2a641b387abd-kube-api-access-nd7mx\") pod \"dnsmasq-dns-775457b975-fbhtg\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.648293 4622 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-ovsdbserver-sb\") pod \"dnsmasq-dns-775457b975-fbhtg\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.648312 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p52k4\" (UniqueName: \"kubernetes.io/projected/52301b8d-a03d-4a3e-af50-bc7d80f9b925-kube-api-access-p52k4\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.648369 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-config\") pod \"dnsmasq-dns-775457b975-fbhtg\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.648415 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-scripts\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.648465 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.648027 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/52301b8d-a03d-4a3e-af50-bc7d80f9b925-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.652873 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.655636 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-scripts\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.657223 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-config-data\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.659882 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-config-data-custom\") pod \"cinder-scheduler-0\" (UID: 
\"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.682140 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p52k4\" (UniqueName: \"kubernetes.io/projected/52301b8d-a03d-4a3e-af50-bc7d80f9b925-kube-api-access-p52k4\") pod \"cinder-scheduler-0\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.750356 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-dns-svc\") pod \"dnsmasq-dns-775457b975-fbhtg\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.750463 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-ovsdbserver-nb\") pod \"dnsmasq-dns-775457b975-fbhtg\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.750488 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nd7mx\" (UniqueName: \"kubernetes.io/projected/c506ee05-4c61-48c9-a61d-2a641b387abd-kube-api-access-nd7mx\") pod \"dnsmasq-dns-775457b975-fbhtg\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.750525 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-ovsdbserver-sb\") pod \"dnsmasq-dns-775457b975-fbhtg\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.750555 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-config\") pod \"dnsmasq-dns-775457b975-fbhtg\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.751433 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-config\") pod \"dnsmasq-dns-775457b975-fbhtg\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.751652 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-ovsdbserver-nb\") pod \"dnsmasq-dns-775457b975-fbhtg\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.751668 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-ovsdbserver-sb\") pod \"dnsmasq-dns-775457b975-fbhtg\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: 
I1126 11:27:24.751779 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-dns-svc\") pod \"dnsmasq-dns-775457b975-fbhtg\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.766598 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nd7mx\" (UniqueName: \"kubernetes.io/projected/c506ee05-4c61-48c9-a61d-2a641b387abd-kube-api-access-nd7mx\") pod \"dnsmasq-dns-775457b975-fbhtg\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.784909 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.793272 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.795401 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.795766 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.809425 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.852727 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.852820 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-scripts\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.853026 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/55256700-597f-475c-ba51-686928866fe0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.853052 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djbf5\" (UniqueName: \"kubernetes.io/projected/55256700-597f-475c-ba51-686928866fe0-kube-api-access-djbf5\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.854066 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-config-data-custom\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.854102 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-config-data\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.854117 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55256700-597f-475c-ba51-686928866fe0-logs\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.874429 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.956038 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-config-data-custom\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.956091 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-config-data\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.956114 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55256700-597f-475c-ba51-686928866fe0-logs\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.956166 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.956243 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-scripts\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.956320 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/55256700-597f-475c-ba51-686928866fe0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.956337 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djbf5\" (UniqueName: \"kubernetes.io/projected/55256700-597f-475c-ba51-686928866fe0-kube-api-access-djbf5\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.956858 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55256700-597f-475c-ba51-686928866fe0-logs\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " 
pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.957413 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/55256700-597f-475c-ba51-686928866fe0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.961686 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.962029 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-config-data-custom\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.966231 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-scripts\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.966649 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-config-data\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:24 crc kubenswrapper[4622]: I1126 11:27:24.971815 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djbf5\" (UniqueName: \"kubernetes.io/projected/55256700-597f-475c-ba51-686928866fe0-kube-api-access-djbf5\") pod \"cinder-api-0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " pod="openstack/cinder-api-0" Nov 26 11:27:25 crc kubenswrapper[4622]: I1126 11:27:25.106465 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 26 11:27:25 crc kubenswrapper[4622]: I1126 11:27:25.310761 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844b557b9c-rld8l" event={"ID":"fb01c7fc-cb10-4308-a1bc-cd3c7873c383","Type":"ContainerStarted","Data":"80f1dd3337d41f45b70507115b3c7f351228ca1c43285954e09c745c047b152b"} Nov 26 11:27:25 crc kubenswrapper[4622]: I1126 11:27:25.310985 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-844b557b9c-rld8l" podUID="fb01c7fc-cb10-4308-a1bc-cd3c7873c383" containerName="dnsmasq-dns" containerID="cri-o://80f1dd3337d41f45b70507115b3c7f351228ca1c43285954e09c745c047b152b" gracePeriod=10 Nov 26 11:27:25 crc kubenswrapper[4622]: I1126 11:27:25.336129 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-844b557b9c-rld8l" podStartSLOduration=3.336114713 podStartE2EDuration="3.336114713s" podCreationTimestamp="2025-11-26 11:27:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:27:25.329478823 +0000 UTC m=+1004.920690345" watchObservedRunningTime="2025-11-26 11:27:25.336114713 +0000 UTC m=+1004.927326236" Nov 26 11:27:25 crc kubenswrapper[4622]: I1126 11:27:25.479542 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-775457b975-fbhtg"] Nov 26 11:27:25 crc kubenswrapper[4622]: W1126 11:27:25.496552 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc506ee05_4c61_48c9_a61d_2a641b387abd.slice/crio-4043a34847a6d241ccf89f27b78cfff405953bc07e0ac8df93284014dc8676c8 WatchSource:0}: Error finding container 4043a34847a6d241ccf89f27b78cfff405953bc07e0ac8df93284014dc8676c8: Status 404 returned error can't find the container with id 4043a34847a6d241ccf89f27b78cfff405953bc07e0ac8df93284014dc8676c8 Nov 26 11:27:25 crc kubenswrapper[4622]: I1126 11:27:25.549629 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 26 11:27:25 crc kubenswrapper[4622]: I1126 11:27:25.671340 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 26 11:27:25 crc kubenswrapper[4622]: W1126 11:27:25.955629 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55256700_597f_475c_ba51_686928866fe0.slice/crio-6662d6d8dd2256242ac2133587256623c0b650e3b779dfac4dfbe2e356a1685d WatchSource:0}: Error finding container 6662d6d8dd2256242ac2133587256623c0b650e3b779dfac4dfbe2e356a1685d: Status 404 returned error can't find the container with id 6662d6d8dd2256242ac2133587256623c0b650e3b779dfac4dfbe2e356a1685d Nov 26 11:27:25 crc kubenswrapper[4622]: I1126 11:27:25.965684 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.088748 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-dns-svc\") pod \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.089100 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbzdq\" (UniqueName: \"kubernetes.io/projected/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-kube-api-access-jbzdq\") pod \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.089133 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-config\") pod \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.089306 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-ovsdbserver-sb\") pod \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.089383 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-ovsdbserver-nb\") pod \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\" (UID: \"fb01c7fc-cb10-4308-a1bc-cd3c7873c383\") " Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.116103 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-kube-api-access-jbzdq" (OuterVolumeSpecName: "kube-api-access-jbzdq") pod "fb01c7fc-cb10-4308-a1bc-cd3c7873c383" (UID: "fb01c7fc-cb10-4308-a1bc-cd3c7873c383"). InnerVolumeSpecName "kube-api-access-jbzdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.162951 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-config" (OuterVolumeSpecName: "config") pod "fb01c7fc-cb10-4308-a1bc-cd3c7873c383" (UID: "fb01c7fc-cb10-4308-a1bc-cd3c7873c383"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.175373 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fb01c7fc-cb10-4308-a1bc-cd3c7873c383" (UID: "fb01c7fc-cb10-4308-a1bc-cd3c7873c383"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.192678 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbzdq\" (UniqueName: \"kubernetes.io/projected/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-kube-api-access-jbzdq\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.192713 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.192723 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.193827 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fb01c7fc-cb10-4308-a1bc-cd3c7873c383" (UID: "fb01c7fc-cb10-4308-a1bc-cd3c7873c383"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.196697 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fb01c7fc-cb10-4308-a1bc-cd3c7873c383" (UID: "fb01c7fc-cb10-4308-a1bc-cd3c7873c383"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.295231 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.295261 4622 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fb01c7fc-cb10-4308-a1bc-cd3c7873c383-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.324132 4622 generic.go:334] "Generic (PLEG): container finished" podID="fb01c7fc-cb10-4308-a1bc-cd3c7873c383" containerID="80f1dd3337d41f45b70507115b3c7f351228ca1c43285954e09c745c047b152b" exitCode=0 Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.324200 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-844b557b9c-rld8l" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.324216 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844b557b9c-rld8l" event={"ID":"fb01c7fc-cb10-4308-a1bc-cd3c7873c383","Type":"ContainerDied","Data":"80f1dd3337d41f45b70507115b3c7f351228ca1c43285954e09c745c047b152b"} Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.324248 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844b557b9c-rld8l" event={"ID":"fb01c7fc-cb10-4308-a1bc-cd3c7873c383","Type":"ContainerDied","Data":"0ef2e758c12c0df261bd124d3d15a7e0183785592edb4f4bc21991819ca95199"} Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.324265 4622 scope.go:117] "RemoveContainer" containerID="80f1dd3337d41f45b70507115b3c7f351228ca1c43285954e09c745c047b152b" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.330304 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" event={"ID":"e0174809-96a2-416e-82b9-9519d71a81d2","Type":"ContainerStarted","Data":"83ea6b68008191c366ff75f51ea31d6bdde013231fea8c41718aac55edd63d65"} Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.330334 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" event={"ID":"e0174809-96a2-416e-82b9-9519d71a81d2","Type":"ContainerStarted","Data":"c0efa274079bd3b71db0a4de3f00f503d611f0ecc36b8e75dbf84ab89257a18d"} Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.332392 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"55256700-597f-475c-ba51-686928866fe0","Type":"ContainerStarted","Data":"6662d6d8dd2256242ac2133587256623c0b650e3b779dfac4dfbe2e356a1685d"} Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.334519 4622 generic.go:334] "Generic (PLEG): container finished" podID="c506ee05-4c61-48c9-a61d-2a641b387abd" containerID="5b85fb6ac47bc65852b765ba1bc2af5008dd54b67cfee029150e396654fb8596" exitCode=0 Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.334566 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-775457b975-fbhtg" event={"ID":"c506ee05-4c61-48c9-a61d-2a641b387abd","Type":"ContainerDied","Data":"5b85fb6ac47bc65852b765ba1bc2af5008dd54b67cfee029150e396654fb8596"} Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.334583 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-775457b975-fbhtg" event={"ID":"c506ee05-4c61-48c9-a61d-2a641b387abd","Type":"ContainerStarted","Data":"4043a34847a6d241ccf89f27b78cfff405953bc07e0ac8df93284014dc8676c8"} Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.336022 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"52301b8d-a03d-4a3e-af50-bc7d80f9b925","Type":"ContainerStarted","Data":"49b1bc9fed90cdbda96706dd3bf1b2384f9131fd8535f9626c4732b48fa841d9"} Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.341286 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e996678-023a-45a2-b44e-a334186d0e2e","Type":"ContainerStarted","Data":"25376bd2a468ef5a80d35102487302ecf83656db92f89bd944411bfc297c3fbd"} Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.346260 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-755d984c7c-rw8ld" 
event={"ID":"8523271e-6368-4a2a-b81e-fd513c441cb6","Type":"ContainerStarted","Data":"c04ed5f8e889b2e6c1a0fbc62b8a47374ad48400e302d47a515d3089e2b6861e"} Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.348421 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7445bd57d8-4bsbs" podStartSLOduration=2.717719789 podStartE2EDuration="4.348389288s" podCreationTimestamp="2025-11-26 11:27:22 +0000 UTC" firstStartedPulling="2025-11-26 11:27:23.443750322 +0000 UTC m=+1003.034961844" lastFinishedPulling="2025-11-26 11:27:25.074419821 +0000 UTC m=+1004.665631343" observedRunningTime="2025-11-26 11:27:26.343749905 +0000 UTC m=+1005.934961457" watchObservedRunningTime="2025-11-26 11:27:26.348389288 +0000 UTC m=+1005.939600810" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.351125 4622 scope.go:117] "RemoveContainer" containerID="9c35e2689cc301f640100edb5a576253942304aad3c66194c904920b79d99f56" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.391611 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-844b557b9c-rld8l"] Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.393625 4622 scope.go:117] "RemoveContainer" containerID="80f1dd3337d41f45b70507115b3c7f351228ca1c43285954e09c745c047b152b" Nov 26 11:27:26 crc kubenswrapper[4622]: E1126 11:27:26.394025 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80f1dd3337d41f45b70507115b3c7f351228ca1c43285954e09c745c047b152b\": container with ID starting with 80f1dd3337d41f45b70507115b3c7f351228ca1c43285954e09c745c047b152b not found: ID does not exist" containerID="80f1dd3337d41f45b70507115b3c7f351228ca1c43285954e09c745c047b152b" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.394060 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80f1dd3337d41f45b70507115b3c7f351228ca1c43285954e09c745c047b152b"} err="failed to get container status \"80f1dd3337d41f45b70507115b3c7f351228ca1c43285954e09c745c047b152b\": rpc error: code = NotFound desc = could not find container \"80f1dd3337d41f45b70507115b3c7f351228ca1c43285954e09c745c047b152b\": container with ID starting with 80f1dd3337d41f45b70507115b3c7f351228ca1c43285954e09c745c047b152b not found: ID does not exist" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.394087 4622 scope.go:117] "RemoveContainer" containerID="9c35e2689cc301f640100edb5a576253942304aad3c66194c904920b79d99f56" Nov 26 11:27:26 crc kubenswrapper[4622]: E1126 11:27:26.394454 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c35e2689cc301f640100edb5a576253942304aad3c66194c904920b79d99f56\": container with ID starting with 9c35e2689cc301f640100edb5a576253942304aad3c66194c904920b79d99f56 not found: ID does not exist" containerID="9c35e2689cc301f640100edb5a576253942304aad3c66194c904920b79d99f56" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.394483 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c35e2689cc301f640100edb5a576253942304aad3c66194c904920b79d99f56"} err="failed to get container status \"9c35e2689cc301f640100edb5a576253942304aad3c66194c904920b79d99f56\": rpc error: code = NotFound desc = could not find container \"9c35e2689cc301f640100edb5a576253942304aad3c66194c904920b79d99f56\": container with ID starting with 
9c35e2689cc301f640100edb5a576253942304aad3c66194c904920b79d99f56 not found: ID does not exist" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.397991 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-844b557b9c-rld8l"] Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.591265 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:27:26 crc kubenswrapper[4622]: I1126 11:27:26.719629 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb01c7fc-cb10-4308-a1bc-cd3c7873c383" path="/var/lib/kubelet/pods/fb01c7fc-cb10-4308-a1bc-cd3c7873c383/volumes" Nov 26 11:27:27 crc kubenswrapper[4622]: I1126 11:27:27.384047 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"55256700-597f-475c-ba51-686928866fe0","Type":"ContainerStarted","Data":"269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a"} Nov 26 11:27:27 crc kubenswrapper[4622]: I1126 11:27:27.384329 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"55256700-597f-475c-ba51-686928866fe0","Type":"ContainerStarted","Data":"64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191"} Nov 26 11:27:27 crc kubenswrapper[4622]: I1126 11:27:27.384494 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 26 11:27:27 crc kubenswrapper[4622]: I1126 11:27:27.388120 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-775457b975-fbhtg" event={"ID":"c506ee05-4c61-48c9-a61d-2a641b387abd","Type":"ContainerStarted","Data":"0c38a8a7f7d9958ccd9440d88525c72312c8275c676617163374cfb72bde4190"} Nov 26 11:27:27 crc kubenswrapper[4622]: I1126 11:27:27.388249 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:27:27 crc kubenswrapper[4622]: I1126 11:27:27.393545 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"52301b8d-a03d-4a3e-af50-bc7d80f9b925","Type":"ContainerStarted","Data":"bc47f8b951acea822fc5c10e6f256c3d25f4e90ed97d3b6634b7ecb8da848e7d"} Nov 26 11:27:27 crc kubenswrapper[4622]: I1126 11:27:27.419101 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.419082009 podStartE2EDuration="3.419082009s" podCreationTimestamp="2025-11-26 11:27:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:27:27.402711059 +0000 UTC m=+1006.993922582" watchObservedRunningTime="2025-11-26 11:27:27.419082009 +0000 UTC m=+1007.010293530" Nov 26 11:27:27 crc kubenswrapper[4622]: I1126 11:27:27.424827 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e996678-023a-45a2-b44e-a334186d0e2e","Type":"ContainerStarted","Data":"88f901acb298622545ba8c960955c5aac61d8319be5599a6439161233f45300a"} Nov 26 11:27:27 crc kubenswrapper[4622]: I1126 11:27:27.424880 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 26 11:27:27 crc kubenswrapper[4622]: I1126 11:27:27.445436 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-755d984c7c-rw8ld" 
event={"ID":"8523271e-6368-4a2a-b81e-fd513c441cb6","Type":"ContainerStarted","Data":"0e725f79cc5d3206da3051566afaf44af466ff6a0c65d3af860145b54b37fad8"} Nov 26 11:27:27 crc kubenswrapper[4622]: I1126 11:27:27.453593 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-775457b975-fbhtg" podStartSLOduration=3.453566525 podStartE2EDuration="3.453566525s" podCreationTimestamp="2025-11-26 11:27:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:27:27.430526001 +0000 UTC m=+1007.021737523" watchObservedRunningTime="2025-11-26 11:27:27.453566525 +0000 UTC m=+1007.044778047" Nov 26 11:27:27 crc kubenswrapper[4622]: I1126 11:27:27.458762 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.8681658049999998 podStartE2EDuration="6.458746308s" podCreationTimestamp="2025-11-26 11:27:21 +0000 UTC" firstStartedPulling="2025-11-26 11:27:22.288076929 +0000 UTC m=+1001.879288451" lastFinishedPulling="2025-11-26 11:27:26.878657432 +0000 UTC m=+1006.469868954" observedRunningTime="2025-11-26 11:27:27.446980077 +0000 UTC m=+1007.038191599" watchObservedRunningTime="2025-11-26 11:27:27.458746308 +0000 UTC m=+1007.049957830" Nov 26 11:27:27 crc kubenswrapper[4622]: I1126 11:27:27.484400 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-755d984c7c-rw8ld" podStartSLOduration=3.703531996 podStartE2EDuration="5.484387829s" podCreationTimestamp="2025-11-26 11:27:22 +0000 UTC" firstStartedPulling="2025-11-26 11:27:24.207526014 +0000 UTC m=+1003.798737536" lastFinishedPulling="2025-11-26 11:27:25.988381858 +0000 UTC m=+1005.579593369" observedRunningTime="2025-11-26 11:27:27.464759213 +0000 UTC m=+1007.055970735" watchObservedRunningTime="2025-11-26 11:27:27.484387829 +0000 UTC m=+1007.075599341" Nov 26 11:27:28 crc kubenswrapper[4622]: I1126 11:27:28.468298 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"52301b8d-a03d-4a3e-af50-bc7d80f9b925","Type":"ContainerStarted","Data":"e90bc949be602b95e1c3ee07fd1b11bdf0057f94fe22c0dda15f07cc4669e178"} Nov 26 11:27:28 crc kubenswrapper[4622]: I1126 11:27:28.491266 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.381645292 podStartE2EDuration="4.491244141s" podCreationTimestamp="2025-11-26 11:27:24 +0000 UTC" firstStartedPulling="2025-11-26 11:27:25.556655315 +0000 UTC m=+1005.147866837" lastFinishedPulling="2025-11-26 11:27:26.666254164 +0000 UTC m=+1006.257465686" observedRunningTime="2025-11-26 11:27:28.482076032 +0000 UTC m=+1008.073287554" watchObservedRunningTime="2025-11-26 11:27:28.491244141 +0000 UTC m=+1008.082455663" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.058293 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.084317 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6b5456649c-2jjn8" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.151750 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-78d76c8f88-kbpmq"] Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.151985 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-78d76c8f88-kbpmq" 
podUID="4666c67e-2196-45f5-a34d-f6d08a51fd67" containerName="neutron-api" containerID="cri-o://99782de7a4c346220d786092168978d474e5e74631c27b06c163036fedbd7a1a" gracePeriod=30 Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.152402 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-78d76c8f88-kbpmq" podUID="4666c67e-2196-45f5-a34d-f6d08a51fd67" containerName="neutron-httpd" containerID="cri-o://e381f265da20dd91c24199cc7523623a3c6dede208fb2d534b7ef0ae15bc7139" gracePeriod=30 Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.474174 4622 generic.go:334] "Generic (PLEG): container finished" podID="4666c67e-2196-45f5-a34d-f6d08a51fd67" containerID="e381f265da20dd91c24199cc7523623a3c6dede208fb2d534b7ef0ae15bc7139" exitCode=0 Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.474253 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-78d76c8f88-kbpmq" event={"ID":"4666c67e-2196-45f5-a34d-f6d08a51fd67","Type":"ContainerDied","Data":"e381f265da20dd91c24199cc7523623a3c6dede208fb2d534b7ef0ae15bc7139"} Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.474843 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="55256700-597f-475c-ba51-686928866fe0" containerName="cinder-api-log" containerID="cri-o://64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191" gracePeriod=30 Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.474938 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="55256700-597f-475c-ba51-686928866fe0" containerName="cinder-api" containerID="cri-o://269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a" gracePeriod=30 Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.734340 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-cc95b6f5d-n86gp"] Nov 26 11:27:29 crc kubenswrapper[4622]: E1126 11:27:29.734683 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb01c7fc-cb10-4308-a1bc-cd3c7873c383" containerName="init" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.734702 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb01c7fc-cb10-4308-a1bc-cd3c7873c383" containerName="init" Nov 26 11:27:29 crc kubenswrapper[4622]: E1126 11:27:29.734722 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb01c7fc-cb10-4308-a1bc-cd3c7873c383" containerName="dnsmasq-dns" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.734727 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb01c7fc-cb10-4308-a1bc-cd3c7873c383" containerName="dnsmasq-dns" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.734899 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb01c7fc-cb10-4308-a1bc-cd3c7873c383" containerName="dnsmasq-dns" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.735794 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.739447 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.739625 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.746496 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-cc95b6f5d-n86gp"] Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.810750 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.849229 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.883592 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b07c19ec-e29f-4d61-b665-c5d66e37d79f-internal-tls-certs\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.883830 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b07c19ec-e29f-4d61-b665-c5d66e37d79f-logs\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.883930 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b07c19ec-e29f-4d61-b665-c5d66e37d79f-combined-ca-bundle\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.884044 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b07c19ec-e29f-4d61-b665-c5d66e37d79f-config-data\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.884114 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b07c19ec-e29f-4d61-b665-c5d66e37d79f-config-data-custom\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.884243 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b07c19ec-e29f-4d61-b665-c5d66e37d79f-public-tls-certs\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:29 crc kubenswrapper[4622]: I1126 11:27:29.884317 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9p8r\" 
(UniqueName: \"kubernetes.io/projected/b07c19ec-e29f-4d61-b665-c5d66e37d79f-kube-api-access-v9p8r\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.006675 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b07c19ec-e29f-4d61-b665-c5d66e37d79f-internal-tls-certs\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.007316 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b07c19ec-e29f-4d61-b665-c5d66e37d79f-logs\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.007400 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b07c19ec-e29f-4d61-b665-c5d66e37d79f-combined-ca-bundle\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.007558 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b07c19ec-e29f-4d61-b665-c5d66e37d79f-config-data\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.007598 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b07c19ec-e29f-4d61-b665-c5d66e37d79f-config-data-custom\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.007624 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b07c19ec-e29f-4d61-b665-c5d66e37d79f-public-tls-certs\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.007641 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9p8r\" (UniqueName: \"kubernetes.io/projected/b07c19ec-e29f-4d61-b665-c5d66e37d79f-kube-api-access-v9p8r\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.008011 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b07c19ec-e29f-4d61-b665-c5d66e37d79f-logs\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.016029 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b07c19ec-e29f-4d61-b665-c5d66e37d79f-public-tls-certs\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.017179 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b07c19ec-e29f-4d61-b665-c5d66e37d79f-config-data-custom\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.017757 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b07c19ec-e29f-4d61-b665-c5d66e37d79f-config-data\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.021317 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b07c19ec-e29f-4d61-b665-c5d66e37d79f-internal-tls-certs\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.022088 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b07c19ec-e29f-4d61-b665-c5d66e37d79f-combined-ca-bundle\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.033981 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9p8r\" (UniqueName: \"kubernetes.io/projected/b07c19ec-e29f-4d61-b665-c5d66e37d79f-kube-api-access-v9p8r\") pod \"barbican-api-cc95b6f5d-n86gp\" (UID: \"b07c19ec-e29f-4d61-b665-c5d66e37d79f\") " pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.087357 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.105246 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.211124 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-scripts\") pod \"55256700-597f-475c-ba51-686928866fe0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.211184 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/55256700-597f-475c-ba51-686928866fe0-etc-machine-id\") pod \"55256700-597f-475c-ba51-686928866fe0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.211259 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-combined-ca-bundle\") pod \"55256700-597f-475c-ba51-686928866fe0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.211284 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-config-data-custom\") pod \"55256700-597f-475c-ba51-686928866fe0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.211429 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55256700-597f-475c-ba51-686928866fe0-logs\") pod \"55256700-597f-475c-ba51-686928866fe0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.211445 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-config-data\") pod \"55256700-597f-475c-ba51-686928866fe0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.211536 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djbf5\" (UniqueName: \"kubernetes.io/projected/55256700-597f-475c-ba51-686928866fe0-kube-api-access-djbf5\") pod \"55256700-597f-475c-ba51-686928866fe0\" (UID: \"55256700-597f-475c-ba51-686928866fe0\") " Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.212627 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/55256700-597f-475c-ba51-686928866fe0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "55256700-597f-475c-ba51-686928866fe0" (UID: "55256700-597f-475c-ba51-686928866fe0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.214458 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55256700-597f-475c-ba51-686928866fe0-logs" (OuterVolumeSpecName: "logs") pod "55256700-597f-475c-ba51-686928866fe0" (UID: "55256700-597f-475c-ba51-686928866fe0"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.217080 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55256700-597f-475c-ba51-686928866fe0-kube-api-access-djbf5" (OuterVolumeSpecName: "kube-api-access-djbf5") pod "55256700-597f-475c-ba51-686928866fe0" (UID: "55256700-597f-475c-ba51-686928866fe0"). InnerVolumeSpecName "kube-api-access-djbf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.225589 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "55256700-597f-475c-ba51-686928866fe0" (UID: "55256700-597f-475c-ba51-686928866fe0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.228850 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-scripts" (OuterVolumeSpecName: "scripts") pod "55256700-597f-475c-ba51-686928866fe0" (UID: "55256700-597f-475c-ba51-686928866fe0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.243306 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "55256700-597f-475c-ba51-686928866fe0" (UID: "55256700-597f-475c-ba51-686928866fe0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.306445 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.315727 4622 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55256700-597f-475c-ba51-686928866fe0-logs\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.315876 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djbf5\" (UniqueName: \"kubernetes.io/projected/55256700-597f-475c-ba51-686928866fe0-kube-api-access-djbf5\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.315913 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.315933 4622 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/55256700-597f-475c-ba51-686928866fe0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.315953 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.316020 4622 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.316119 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-config-data" (OuterVolumeSpecName: "config-data") pod "55256700-597f-475c-ba51-686928866fe0" (UID: "55256700-597f-475c-ba51-686928866fe0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.374316 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.412022 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.422119 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55256700-597f-475c-ba51-686928866fe0-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.507661 4622 generic.go:334] "Generic (PLEG): container finished" podID="55256700-597f-475c-ba51-686928866fe0" containerID="269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a" exitCode=0 Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.507698 4622 generic.go:334] "Generic (PLEG): container finished" podID="55256700-597f-475c-ba51-686928866fe0" containerID="64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191" exitCode=143 Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.508686 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.510342 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"55256700-597f-475c-ba51-686928866fe0","Type":"ContainerDied","Data":"269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a"} Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.510405 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"55256700-597f-475c-ba51-686928866fe0","Type":"ContainerDied","Data":"64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191"} Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.510418 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"55256700-597f-475c-ba51-686928866fe0","Type":"ContainerDied","Data":"6662d6d8dd2256242ac2133587256623c0b650e3b779dfac4dfbe2e356a1685d"} Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.510437 4622 scope.go:117] "RemoveContainer" containerID="269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.548105 4622 scope.go:117] "RemoveContainer" containerID="64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.593990 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.601916 4622 scope.go:117] "RemoveContainer" containerID="269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a" Nov 26 11:27:30 crc kubenswrapper[4622]: E1126 11:27:30.603477 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a\": container with ID starting with 269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a not found: ID does not exist" containerID="269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.603531 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a"} err="failed to get container status \"269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a\": rpc error: code = NotFound desc = could not find container \"269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a\": container with ID starting with 269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a not found: ID does not exist" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.603559 4622 scope.go:117] "RemoveContainer" containerID="64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191" Nov 26 11:27:30 crc kubenswrapper[4622]: E1126 11:27:30.604639 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191\": container with ID starting with 64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191 not found: ID does not exist" containerID="64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.604671 4622 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191"} err="failed to get container status \"64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191\": rpc error: code = NotFound desc = could not find container \"64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191\": container with ID starting with 64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191 not found: ID does not exist" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.604690 4622 scope.go:117] "RemoveContainer" containerID="269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.605894 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a"} err="failed to get container status \"269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a\": rpc error: code = NotFound desc = could not find container \"269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a\": container with ID starting with 269d9dd6495bd304a5a3b173a1c57c9851e9419fb93641c57cae6e5988af1c9a not found: ID does not exist" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.605939 4622 scope.go:117] "RemoveContainer" containerID="64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.609557 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.609920 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191"} err="failed to get container status \"64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191\": rpc error: code = NotFound desc = could not find container \"64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191\": container with ID starting with 64c9cfe9bd8fdfaed280b1703d3ec3ee776aadf21968100610b9c3a7efbda191 not found: ID does not exist" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.626560 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 26 11:27:30 crc kubenswrapper[4622]: E1126 11:27:30.627050 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55256700-597f-475c-ba51-686928866fe0" containerName="cinder-api-log" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.627069 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="55256700-597f-475c-ba51-686928866fe0" containerName="cinder-api-log" Nov 26 11:27:30 crc kubenswrapper[4622]: E1126 11:27:30.627098 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55256700-597f-475c-ba51-686928866fe0" containerName="cinder-api" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.627106 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="55256700-597f-475c-ba51-686928866fe0" containerName="cinder-api" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.627306 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="55256700-597f-475c-ba51-686928866fe0" containerName="cinder-api-log" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.627326 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="55256700-597f-475c-ba51-686928866fe0" containerName="cinder-api" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.628290 4622 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.634969 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.635132 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.635265 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.645981 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-cc95b6f5d-n86gp"] Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.658577 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.715170 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55256700-597f-475c-ba51-686928866fe0" path="/var/lib/kubelet/pods/55256700-597f-475c-ba51-686928866fe0/volumes" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.741442 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.741565 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-scripts\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.741584 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-config-data\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.741600 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.741695 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.741755 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31b46baf-53d8-4837-9834-4e465b305b6d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.741776 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31b46baf-53d8-4837-9834-4e465b305b6d-logs\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.741826 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-config-data-custom\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.741840 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wn4q6\" (UniqueName: \"kubernetes.io/projected/31b46baf-53d8-4837-9834-4e465b305b6d-kube-api-access-wn4q6\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.843384 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-scripts\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.843616 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-config-data\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.843643 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.843711 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.843757 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31b46baf-53d8-4837-9834-4e465b305b6d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.843778 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31b46baf-53d8-4837-9834-4e465b305b6d-logs\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.843819 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-config-data-custom\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.843837 4622 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-wn4q6\" (UniqueName: \"kubernetes.io/projected/31b46baf-53d8-4837-9834-4e465b305b6d-kube-api-access-wn4q6\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.843858 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.844703 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31b46baf-53d8-4837-9834-4e465b305b6d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.845973 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31b46baf-53d8-4837-9834-4e465b305b6d-logs\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.850227 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-config-data-custom\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.853739 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-scripts\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.854010 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.854316 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-config-data\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.860387 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.860968 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31b46baf-53d8-4837-9834-4e465b305b6d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.865690 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-wn4q6\" (UniqueName: \"kubernetes.io/projected/31b46baf-53d8-4837-9834-4e465b305b6d-kube-api-access-wn4q6\") pod \"cinder-api-0\" (UID: \"31b46baf-53d8-4837-9834-4e465b305b6d\") " pod="openstack/cinder-api-0" Nov 26 11:27:30 crc kubenswrapper[4622]: I1126 11:27:30.971153 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 26 11:27:31 crc kubenswrapper[4622]: I1126 11:27:31.360021 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 26 11:27:31 crc kubenswrapper[4622]: W1126 11:27:31.366137 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31b46baf_53d8_4837_9834_4e465b305b6d.slice/crio-2994e3861bbd4825e397169b09e302f1abf6165e4534483c02b62001116852a7 WatchSource:0}: Error finding container 2994e3861bbd4825e397169b09e302f1abf6165e4534483c02b62001116852a7: Status 404 returned error can't find the container with id 2994e3861bbd4825e397169b09e302f1abf6165e4534483c02b62001116852a7 Nov 26 11:27:31 crc kubenswrapper[4622]: I1126 11:27:31.519302 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"31b46baf-53d8-4837-9834-4e465b305b6d","Type":"ContainerStarted","Data":"2994e3861bbd4825e397169b09e302f1abf6165e4534483c02b62001116852a7"} Nov 26 11:27:31 crc kubenswrapper[4622]: I1126 11:27:31.531559 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-cc95b6f5d-n86gp" event={"ID":"b07c19ec-e29f-4d61-b665-c5d66e37d79f","Type":"ContainerStarted","Data":"6142452d24a00f9f52459c6abce98dd519ef085a5fb8751e7afe8949cedf4853"} Nov 26 11:27:31 crc kubenswrapper[4622]: I1126 11:27:31.531611 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-cc95b6f5d-n86gp" event={"ID":"b07c19ec-e29f-4d61-b665-c5d66e37d79f","Type":"ContainerStarted","Data":"3f7287387aa6743f3c237766e123ee3afdbea184777632f6dcdc42977bf9966b"} Nov 26 11:27:31 crc kubenswrapper[4622]: I1126 11:27:31.531623 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-cc95b6f5d-n86gp" event={"ID":"b07c19ec-e29f-4d61-b665-c5d66e37d79f","Type":"ContainerStarted","Data":"17ee5d0c1580dea403e71a64be0a94247568f8097b9879be69af4eed3bc0d965"} Nov 26 11:27:31 crc kubenswrapper[4622]: I1126 11:27:31.532656 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:31 crc kubenswrapper[4622]: I1126 11:27:31.532696 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:31 crc kubenswrapper[4622]: I1126 11:27:31.548168 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-cc95b6f5d-n86gp" podStartSLOduration=2.548155177 podStartE2EDuration="2.548155177s" podCreationTimestamp="2025-11-26 11:27:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:27:31.546674403 +0000 UTC m=+1011.137885925" watchObservedRunningTime="2025-11-26 11:27:31.548155177 +0000 UTC m=+1011.139366699" Nov 26 11:27:31 crc kubenswrapper[4622]: I1126 11:27:31.664035 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:27:32 crc kubenswrapper[4622]: I1126 11:27:32.252684 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/horizon-bc6fbfc58-jv7cz" Nov 26 11:27:32 crc kubenswrapper[4622]: I1126 11:27:32.319048 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-686b8b87c4-gqg7z"] Nov 26 11:27:32 crc kubenswrapper[4622]: I1126 11:27:32.562663 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"31b46baf-53d8-4837-9834-4e465b305b6d","Type":"ContainerStarted","Data":"1d0d99000ca1c80a182fa259bf066abd681ff682593487d0d8767f0199884611"} Nov 26 11:27:32 crc kubenswrapper[4622]: I1126 11:27:32.563022 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"31b46baf-53d8-4837-9834-4e465b305b6d","Type":"ContainerStarted","Data":"435d5d03afe042bacd7107a1789780ec256fd0bc774f73842595cc5185473f0c"} Nov 26 11:27:32 crc kubenswrapper[4622]: I1126 11:27:32.563054 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-686b8b87c4-gqg7z" podUID="85b663a5-e13e-4653-a4bb-340952a968c9" containerName="horizon-log" containerID="cri-o://ddae576e306c48d7087832ee3088ed9b10e1389b547debbf986408ac880d263a" gracePeriod=30 Nov 26 11:27:32 crc kubenswrapper[4622]: I1126 11:27:32.563451 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-686b8b87c4-gqg7z" podUID="85b663a5-e13e-4653-a4bb-340952a968c9" containerName="horizon" containerID="cri-o://db66f6b6e8a9d8a3409bda6d5204063e52dfe897279be8e132b9d8af69263fd6" gracePeriod=30 Nov 26 11:27:32 crc kubenswrapper[4622]: I1126 11:27:32.589650 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.5896321220000003 podStartE2EDuration="2.589632122s" podCreationTimestamp="2025-11-26 11:27:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:27:32.581689807 +0000 UTC m=+1012.172901329" watchObservedRunningTime="2025-11-26 11:27:32.589632122 +0000 UTC m=+1012.180843644" Nov 26 11:27:33 crc kubenswrapper[4622]: I1126 11:27:33.569747 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.340886 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.412354 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.525691 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-horizon-secret-key\") pod \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.525770 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bq44h\" (UniqueName: \"kubernetes.io/projected/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-kube-api-access-bq44h\") pod \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.525825 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-horizon-secret-key\") pod \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.525861 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tpqm\" (UniqueName: \"kubernetes.io/projected/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-kube-api-access-8tpqm\") pod \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.525918 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-logs\") pod \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.525991 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-scripts\") pod \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.526015 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-config-data\") pod \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.526063 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-logs\") pod \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\" (UID: \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.526170 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-scripts\") pod \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\" (UID: \"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0\") " Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.526212 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-config-data\") pod \"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\" (UID: 
\"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2\") " Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.528650 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-logs" (OuterVolumeSpecName: "logs") pod "4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" (UID: "4ec3ddcf-99b7-4529-ab0a-ac52a138edf2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.528684 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-logs" (OuterVolumeSpecName: "logs") pod "f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" (UID: "f7a6ec8b-bc72-4776-89c4-d68e367d4dc0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.534713 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-kube-api-access-8tpqm" (OuterVolumeSpecName: "kube-api-access-8tpqm") pod "4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" (UID: "4ec3ddcf-99b7-4529-ab0a-ac52a138edf2"). InnerVolumeSpecName "kube-api-access-8tpqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.534770 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-kube-api-access-bq44h" (OuterVolumeSpecName: "kube-api-access-bq44h") pod "f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" (UID: "f7a6ec8b-bc72-4776-89c4-d68e367d4dc0"). InnerVolumeSpecName "kube-api-access-bq44h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.535276 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" (UID: "4ec3ddcf-99b7-4529-ab0a-ac52a138edf2"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.535304 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" (UID: "f7a6ec8b-bc72-4776-89c4-d68e367d4dc0"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.551984 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-scripts" (OuterVolumeSpecName: "scripts") pod "4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" (UID: "4ec3ddcf-99b7-4529-ab0a-ac52a138edf2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.552064 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-config-data" (OuterVolumeSpecName: "config-data") pod "4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" (UID: "4ec3ddcf-99b7-4529-ab0a-ac52a138edf2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.553549 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-scripts" (OuterVolumeSpecName: "scripts") pod "f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" (UID: "f7a6ec8b-bc72-4776-89c4-d68e367d4dc0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.558776 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-config-data" (OuterVolumeSpecName: "config-data") pod "f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" (UID: "f7a6ec8b-bc72-4776-89c4-d68e367d4dc0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.581800 4622 generic.go:334] "Generic (PLEG): container finished" podID="4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" containerID="ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7" exitCode=137 Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.581842 4622 generic.go:334] "Generic (PLEG): container finished" podID="4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" containerID="00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb" exitCode=137 Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.581910 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76c94b49cc-l575g" event={"ID":"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2","Type":"ContainerDied","Data":"ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7"} Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.581965 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76c94b49cc-l575g" event={"ID":"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2","Type":"ContainerDied","Data":"00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb"} Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.581978 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76c94b49cc-l575g" event={"ID":"4ec3ddcf-99b7-4529-ab0a-ac52a138edf2","Type":"ContainerDied","Data":"b6ab66a642764da6543f23bf2302c003a077787c5a9967acb56a34c55b690d63"} Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.581997 4622 scope.go:117] "RemoveContainer" containerID="ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.583178 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-76c94b49cc-l575g" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.583888 4622 generic.go:334] "Generic (PLEG): container finished" podID="f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" containerID="24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf" exitCode=137 Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.583923 4622 generic.go:334] "Generic (PLEG): container finished" podID="f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" containerID="a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e" exitCode=137 Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.583967 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-868fc87bbf-49ntq" event={"ID":"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0","Type":"ContainerDied","Data":"24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf"} Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.583992 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-868fc87bbf-49ntq" event={"ID":"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0","Type":"ContainerDied","Data":"a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e"} Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.584003 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-868fc87bbf-49ntq" event={"ID":"f7a6ec8b-bc72-4776-89c4-d68e367d4dc0","Type":"ContainerDied","Data":"cf0a8f2725737b32302119bfbefce4fcc0252abd138d91875c6611b2f6df4fa6"} Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.584951 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-868fc87bbf-49ntq" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.624223 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-76c94b49cc-l575g"] Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.629134 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tpqm\" (UniqueName: \"kubernetes.io/projected/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-kube-api-access-8tpqm\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.629169 4622 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-logs\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.629181 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.629206 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.629218 4622 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-logs\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.629227 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.629235 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.629246 4622 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.629256 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bq44h\" (UniqueName: \"kubernetes.io/projected/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0-kube-api-access-bq44h\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.629266 4622 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.630136 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-76c94b49cc-l575g"] Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.637023 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-868fc87bbf-49ntq"] Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.642087 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-868fc87bbf-49ntq"] Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.715611 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" path="/var/lib/kubelet/pods/4ec3ddcf-99b7-4529-ab0a-ac52a138edf2/volumes" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.716307 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" path="/var/lib/kubelet/pods/f7a6ec8b-bc72-4776-89c4-d68e367d4dc0/volumes" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.765131 4622 scope.go:117] "RemoveContainer" containerID="00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.791451 4622 scope.go:117] "RemoveContainer" containerID="ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7" Nov 26 11:27:34 crc kubenswrapper[4622]: E1126 11:27:34.791867 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7\": container with ID starting with ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7 not found: ID does not exist" containerID="ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.791919 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7"} err="failed to get container status \"ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7\": rpc error: code = NotFound desc = could not find container \"ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7\": container with ID starting with ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7 not found: ID does not exist" Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.791968 4622 scope.go:117] "RemoveContainer" containerID="00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb" Nov 26 11:27:34 crc kubenswrapper[4622]: 
Nov 26 11:27:34 crc kubenswrapper[4622]: E1126 11:27:34.792338 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb\": container with ID starting with 00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb not found: ID does not exist" containerID="00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.792365 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb"} err="failed to get container status \"00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb\": rpc error: code = NotFound desc = could not find container \"00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb\": container with ID starting with 00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb not found: ID does not exist"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.792379 4622 scope.go:117] "RemoveContainer" containerID="ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.792940 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7"} err="failed to get container status \"ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7\": rpc error: code = NotFound desc = could not find container \"ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7\": container with ID starting with ceb047d1b4263b2137654c78f57151a037d3b3b21e07e4b3311cfad13047d6d7 not found: ID does not exist"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.792963 4622 scope.go:117] "RemoveContainer" containerID="00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.793156 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb"} err="failed to get container status \"00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb\": rpc error: code = NotFound desc = could not find container \"00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb\": container with ID starting with 00b564159284ed4ac7ead498aea12bd7f4c9120acd41af6b1ebf42469cdd0bbb not found: ID does not exist"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.793177 4622 scope.go:117] "RemoveContainer" containerID="24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.875687 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-775457b975-fbhtg"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.922395 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6677d66f85-cm5jg"]
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.922664 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" podUID="0f5a5dad-9d9e-439d-9946-43da9af3caf2" containerName="dnsmasq-dns" containerID="cri-o://2fa86a5fe79538f8a4b27e13c35f799e9a8a45108f4c145b622950dfd6188f6f" gracePeriod=10
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.953664 4622 scope.go:117] "RemoveContainer" containerID="a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.981976 4622 scope.go:117] "RemoveContainer" containerID="24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf"
Nov 26 11:27:34 crc kubenswrapper[4622]: E1126 11:27:34.983869 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf\": container with ID starting with 24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf not found: ID does not exist" containerID="24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.983914 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf"} err="failed to get container status \"24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf\": rpc error: code = NotFound desc = could not find container \"24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf\": container with ID starting with 24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf not found: ID does not exist"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.983941 4622 scope.go:117] "RemoveContainer" containerID="a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e"
Nov 26 11:27:34 crc kubenswrapper[4622]: E1126 11:27:34.984233 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e\": container with ID starting with a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e not found: ID does not exist" containerID="a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.984287 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e"} err="failed to get container status \"a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e\": rpc error: code = NotFound desc = could not find container \"a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e\": container with ID starting with a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e not found: ID does not exist"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.984315 4622 scope.go:117] "RemoveContainer" containerID="24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.984751 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf"} err="failed to get container status \"24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf\": rpc error: code = NotFound desc = could not find container \"24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf\": container with ID starting with 24acf1f885568556f49649800bffb677a531b3f182f46769c9d8488b100c3adf not found: ID does not exist"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.984773 4622 scope.go:117] "RemoveContainer" containerID="a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e"
Nov 26 11:27:34 crc kubenswrapper[4622]: I1126 11:27:34.985126 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e"} err="failed to get container status \"a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e\": rpc error: code = NotFound desc = could not find container \"a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e\": container with ID starting with a78ef9a28e0bd72cddf880944a2e81533f094fa1219b69ae93d39368550f285e not found: ID does not exist"
Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.082841 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.133285 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.369310 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6677d66f85-cm5jg"
Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.447114 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-config\") pod \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") "
Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.447427 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6v587\" (UniqueName: \"kubernetes.io/projected/0f5a5dad-9d9e-439d-9946-43da9af3caf2-kube-api-access-6v587\") pod \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") "
Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.447514 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-ovsdbserver-nb\") pod \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") "
Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.447558 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-ovsdbserver-sb\") pod \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") "
Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.447624 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-dns-svc\") pod \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\" (UID: \"0f5a5dad-9d9e-439d-9946-43da9af3caf2\") "
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.497711 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0f5a5dad-9d9e-439d-9946-43da9af3caf2" (UID: "0f5a5dad-9d9e-439d-9946-43da9af3caf2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.499165 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0f5a5dad-9d9e-439d-9946-43da9af3caf2" (UID: "0f5a5dad-9d9e-439d-9946-43da9af3caf2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.506154 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0f5a5dad-9d9e-439d-9946-43da9af3caf2" (UID: "0f5a5dad-9d9e-439d-9946-43da9af3caf2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.509929 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-config" (OuterVolumeSpecName: "config") pod "0f5a5dad-9d9e-439d-9946-43da9af3caf2" (UID: "0f5a5dad-9d9e-439d-9946-43da9af3caf2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.550102 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6v587\" (UniqueName: \"kubernetes.io/projected/0f5a5dad-9d9e-439d-9946-43da9af3caf2-kube-api-access-6v587\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.550245 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.550303 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.550373 4622 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.550424 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f5a5dad-9d9e-439d-9946-43da9af3caf2-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.594520 4622 generic.go:334] "Generic (PLEG): container finished" podID="0f5a5dad-9d9e-439d-9946-43da9af3caf2" containerID="2fa86a5fe79538f8a4b27e13c35f799e9a8a45108f4c145b622950dfd6188f6f" exitCode=0 Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.594590 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" 
event={"ID":"0f5a5dad-9d9e-439d-9946-43da9af3caf2","Type":"ContainerDied","Data":"2fa86a5fe79538f8a4b27e13c35f799e9a8a45108f4c145b622950dfd6188f6f"} Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.594624 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" event={"ID":"0f5a5dad-9d9e-439d-9946-43da9af3caf2","Type":"ContainerDied","Data":"94781550ec460540bbdffb095bc0344cc94fdca2e36777b915fa1ae31b8cb67b"} Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.594642 4622 scope.go:117] "RemoveContainer" containerID="2fa86a5fe79538f8a4b27e13c35f799e9a8a45108f4c145b622950dfd6188f6f" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.594596 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6677d66f85-cm5jg" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.597418 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="52301b8d-a03d-4a3e-af50-bc7d80f9b925" containerName="cinder-scheduler" containerID="cri-o://bc47f8b951acea822fc5c10e6f256c3d25f4e90ed97d3b6634b7ecb8da848e7d" gracePeriod=30 Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.597535 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="52301b8d-a03d-4a3e-af50-bc7d80f9b925" containerName="probe" containerID="cri-o://e90bc949be602b95e1c3ee07fd1b11bdf0057f94fe22c0dda15f07cc4669e178" gracePeriod=30 Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.627281 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6677d66f85-cm5jg"] Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.628996 4622 scope.go:117] "RemoveContainer" containerID="ffba0813d39211460a07d9daf8698b3945207629995d159c6ec1061aeef43da1" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.633872 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6677d66f85-cm5jg"] Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.654747 4622 scope.go:117] "RemoveContainer" containerID="2fa86a5fe79538f8a4b27e13c35f799e9a8a45108f4c145b622950dfd6188f6f" Nov 26 11:27:35 crc kubenswrapper[4622]: E1126 11:27:35.655291 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fa86a5fe79538f8a4b27e13c35f799e9a8a45108f4c145b622950dfd6188f6f\": container with ID starting with 2fa86a5fe79538f8a4b27e13c35f799e9a8a45108f4c145b622950dfd6188f6f not found: ID does not exist" containerID="2fa86a5fe79538f8a4b27e13c35f799e9a8a45108f4c145b622950dfd6188f6f" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.655336 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fa86a5fe79538f8a4b27e13c35f799e9a8a45108f4c145b622950dfd6188f6f"} err="failed to get container status \"2fa86a5fe79538f8a4b27e13c35f799e9a8a45108f4c145b622950dfd6188f6f\": rpc error: code = NotFound desc = could not find container \"2fa86a5fe79538f8a4b27e13c35f799e9a8a45108f4c145b622950dfd6188f6f\": container with ID starting with 2fa86a5fe79538f8a4b27e13c35f799e9a8a45108f4c145b622950dfd6188f6f not found: ID does not exist" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.655368 4622 scope.go:117] "RemoveContainer" containerID="ffba0813d39211460a07d9daf8698b3945207629995d159c6ec1061aeef43da1" Nov 26 11:27:35 crc kubenswrapper[4622]: E1126 11:27:35.655889 4622 log.go:32] "ContainerStatus from 
runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffba0813d39211460a07d9daf8698b3945207629995d159c6ec1061aeef43da1\": container with ID starting with ffba0813d39211460a07d9daf8698b3945207629995d159c6ec1061aeef43da1 not found: ID does not exist" containerID="ffba0813d39211460a07d9daf8698b3945207629995d159c6ec1061aeef43da1" Nov 26 11:27:35 crc kubenswrapper[4622]: I1126 11:27:35.655914 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffba0813d39211460a07d9daf8698b3945207629995d159c6ec1061aeef43da1"} err="failed to get container status \"ffba0813d39211460a07d9daf8698b3945207629995d159c6ec1061aeef43da1\": rpc error: code = NotFound desc = could not find container \"ffba0813d39211460a07d9daf8698b3945207629995d159c6ec1061aeef43da1\": container with ID starting with ffba0813d39211460a07d9daf8698b3945207629995d159c6ec1061aeef43da1 not found: ID does not exist" Nov 26 11:27:36 crc kubenswrapper[4622]: I1126 11:27:36.375260 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:36 crc kubenswrapper[4622]: I1126 11:27:36.408972 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-cc95b6f5d-n86gp" Nov 26 11:27:36 crc kubenswrapper[4622]: I1126 11:27:36.471438 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-649b8ddf5b-rsqxp"] Nov 26 11:27:36 crc kubenswrapper[4622]: I1126 11:27:36.471866 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-649b8ddf5b-rsqxp" podUID="20acddaa-a33d-4cb3-9a25-cc0f88aafca7" containerName="barbican-api" containerID="cri-o://77acbfee0613c78486fd98aab47cfb51bf09e3fa2e8285df63c81f27cfc81657" gracePeriod=30 Nov 26 11:27:36 crc kubenswrapper[4622]: I1126 11:27:36.471728 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-649b8ddf5b-rsqxp" podUID="20acddaa-a33d-4cb3-9a25-cc0f88aafca7" containerName="barbican-api-log" containerID="cri-o://bb0743b5a8fa9fdf0bd1a26da13b5bddb94d8f06cf3d722dde8cd4eb9053107e" gracePeriod=30 Nov 26 11:27:36 crc kubenswrapper[4622]: I1126 11:27:36.612809 4622 generic.go:334] "Generic (PLEG): container finished" podID="20acddaa-a33d-4cb3-9a25-cc0f88aafca7" containerID="bb0743b5a8fa9fdf0bd1a26da13b5bddb94d8f06cf3d722dde8cd4eb9053107e" exitCode=143 Nov 26 11:27:36 crc kubenswrapper[4622]: I1126 11:27:36.612887 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-649b8ddf5b-rsqxp" event={"ID":"20acddaa-a33d-4cb3-9a25-cc0f88aafca7","Type":"ContainerDied","Data":"bb0743b5a8fa9fdf0bd1a26da13b5bddb94d8f06cf3d722dde8cd4eb9053107e"} Nov 26 11:27:36 crc kubenswrapper[4622]: I1126 11:27:36.623948 4622 generic.go:334] "Generic (PLEG): container finished" podID="52301b8d-a03d-4a3e-af50-bc7d80f9b925" containerID="e90bc949be602b95e1c3ee07fd1b11bdf0057f94fe22c0dda15f07cc4669e178" exitCode=0 Nov 26 11:27:36 crc kubenswrapper[4622]: I1126 11:27:36.624013 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"52301b8d-a03d-4a3e-af50-bc7d80f9b925","Type":"ContainerDied","Data":"e90bc949be602b95e1c3ee07fd1b11bdf0057f94fe22c0dda15f07cc4669e178"} Nov 26 11:27:36 crc kubenswrapper[4622]: I1126 11:27:36.629269 4622 generic.go:334] "Generic (PLEG): container finished" podID="85b663a5-e13e-4653-a4bb-340952a968c9" 
containerID="db66f6b6e8a9d8a3409bda6d5204063e52dfe897279be8e132b9d8af69263fd6" exitCode=0 Nov 26 11:27:36 crc kubenswrapper[4622]: I1126 11:27:36.629368 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-686b8b87c4-gqg7z" event={"ID":"85b663a5-e13e-4653-a4bb-340952a968c9","Type":"ContainerDied","Data":"db66f6b6e8a9d8a3409bda6d5204063e52dfe897279be8e132b9d8af69263fd6"} Nov 26 11:27:36 crc kubenswrapper[4622]: I1126 11:27:36.721239 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f5a5dad-9d9e-439d-9946-43da9af3caf2" path="/var/lib/kubelet/pods/0f5a5dad-9d9e-439d-9946-43da9af3caf2/volumes" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.030626 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-686b8b87c4-gqg7z" podUID="85b663a5-e13e-4653-a4bb-340952a968c9" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.140:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.140:8443: connect: connection refused" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.410468 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.415998 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.518172 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p52k4\" (UniqueName: \"kubernetes.io/projected/52301b8d-a03d-4a3e-af50-bc7d80f9b925-kube-api-access-p52k4\") pod \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.518315 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/52301b8d-a03d-4a3e-af50-bc7d80f9b925-etc-machine-id\") pod \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.518376 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9h4kd\" (UniqueName: \"kubernetes.io/projected/4666c67e-2196-45f5-a34d-f6d08a51fd67-kube-api-access-9h4kd\") pod \"4666c67e-2196-45f5-a34d-f6d08a51fd67\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.518395 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-config-data-custom\") pod \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.518417 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-combined-ca-bundle\") pod \"4666c67e-2196-45f5-a34d-f6d08a51fd67\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.518436 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-config-data\") pod \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\" (UID: 
\"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.518454 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-config\") pod \"4666c67e-2196-45f5-a34d-f6d08a51fd67\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.518482 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-ovndb-tls-certs\") pod \"4666c67e-2196-45f5-a34d-f6d08a51fd67\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.518587 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-scripts\") pod \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.518646 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-httpd-config\") pod \"4666c67e-2196-45f5-a34d-f6d08a51fd67\" (UID: \"4666c67e-2196-45f5-a34d-f6d08a51fd67\") " Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.518689 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-combined-ca-bundle\") pod \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\" (UID: \"52301b8d-a03d-4a3e-af50-bc7d80f9b925\") " Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.520847 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/52301b8d-a03d-4a3e-af50-bc7d80f9b925-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "52301b8d-a03d-4a3e-af50-bc7d80f9b925" (UID: "52301b8d-a03d-4a3e-af50-bc7d80f9b925"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.525196 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4666c67e-2196-45f5-a34d-f6d08a51fd67-kube-api-access-9h4kd" (OuterVolumeSpecName: "kube-api-access-9h4kd") pod "4666c67e-2196-45f5-a34d-f6d08a51fd67" (UID: "4666c67e-2196-45f5-a34d-f6d08a51fd67"). InnerVolumeSpecName "kube-api-access-9h4kd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.525445 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52301b8d-a03d-4a3e-af50-bc7d80f9b925-kube-api-access-p52k4" (OuterVolumeSpecName: "kube-api-access-p52k4") pod "52301b8d-a03d-4a3e-af50-bc7d80f9b925" (UID: "52301b8d-a03d-4a3e-af50-bc7d80f9b925"). InnerVolumeSpecName "kube-api-access-p52k4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.526752 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-scripts" (OuterVolumeSpecName: "scripts") pod "52301b8d-a03d-4a3e-af50-bc7d80f9b925" (UID: "52301b8d-a03d-4a3e-af50-bc7d80f9b925"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.527318 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "52301b8d-a03d-4a3e-af50-bc7d80f9b925" (UID: "52301b8d-a03d-4a3e-af50-bc7d80f9b925"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.528107 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "4666c67e-2196-45f5-a34d-f6d08a51fd67" (UID: "4666c67e-2196-45f5-a34d-f6d08a51fd67"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.562011 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "52301b8d-a03d-4a3e-af50-bc7d80f9b925" (UID: "52301b8d-a03d-4a3e-af50-bc7d80f9b925"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.572144 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-config" (OuterVolumeSpecName: "config") pod "4666c67e-2196-45f5-a34d-f6d08a51fd67" (UID: "4666c67e-2196-45f5-a34d-f6d08a51fd67"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.574411 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4666c67e-2196-45f5-a34d-f6d08a51fd67" (UID: "4666c67e-2196-45f5-a34d-f6d08a51fd67"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.590871 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "4666c67e-2196-45f5-a34d-f6d08a51fd67" (UID: "4666c67e-2196-45f5-a34d-f6d08a51fd67"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.601339 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-config-data" (OuterVolumeSpecName: "config-data") pod "52301b8d-a03d-4a3e-af50-bc7d80f9b925" (UID: "52301b8d-a03d-4a3e-af50-bc7d80f9b925"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.623314 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.623386 4622 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-httpd-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.623402 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.623417 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p52k4\" (UniqueName: \"kubernetes.io/projected/52301b8d-a03d-4a3e-af50-bc7d80f9b925-kube-api-access-p52k4\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.623429 4622 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/52301b8d-a03d-4a3e-af50-bc7d80f9b925-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.623439 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9h4kd\" (UniqueName: \"kubernetes.io/projected/4666c67e-2196-45f5-a34d-f6d08a51fd67-kube-api-access-9h4kd\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.623448 4622 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.623474 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.623483 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52301b8d-a03d-4a3e-af50-bc7d80f9b925-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.623492 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.623514 4622 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4666c67e-2196-45f5-a34d-f6d08a51fd67-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.658430 4622 generic.go:334] "Generic (PLEG): container finished" podID="4666c67e-2196-45f5-a34d-f6d08a51fd67" containerID="99782de7a4c346220d786092168978d474e5e74631c27b06c163036fedbd7a1a" exitCode=0 Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.658482 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-78d76c8f88-kbpmq" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.658512 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-78d76c8f88-kbpmq" event={"ID":"4666c67e-2196-45f5-a34d-f6d08a51fd67","Type":"ContainerDied","Data":"99782de7a4c346220d786092168978d474e5e74631c27b06c163036fedbd7a1a"} Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.658547 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-78d76c8f88-kbpmq" event={"ID":"4666c67e-2196-45f5-a34d-f6d08a51fd67","Type":"ContainerDied","Data":"9f3132ffd108d109a6ccd4000ac08df58f3d4e9085727ddc54d6cf6f2cabfc4a"} Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.658580 4622 scope.go:117] "RemoveContainer" containerID="e381f265da20dd91c24199cc7523623a3c6dede208fb2d534b7ef0ae15bc7139" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.661698 4622 generic.go:334] "Generic (PLEG): container finished" podID="52301b8d-a03d-4a3e-af50-bc7d80f9b925" containerID="bc47f8b951acea822fc5c10e6f256c3d25f4e90ed97d3b6634b7ecb8da848e7d" exitCode=0 Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.661721 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"52301b8d-a03d-4a3e-af50-bc7d80f9b925","Type":"ContainerDied","Data":"bc47f8b951acea822fc5c10e6f256c3d25f4e90ed97d3b6634b7ecb8da848e7d"} Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.661738 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"52301b8d-a03d-4a3e-af50-bc7d80f9b925","Type":"ContainerDied","Data":"49b1bc9fed90cdbda96706dd3bf1b2384f9131fd8535f9626c4732b48fa841d9"} Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.661758 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.686316 4622 scope.go:117] "RemoveContainer" containerID="99782de7a4c346220d786092168978d474e5e74631c27b06c163036fedbd7a1a" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.703552 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.726594 4622 scope.go:117] "RemoveContainer" containerID="e381f265da20dd91c24199cc7523623a3c6dede208fb2d534b7ef0ae15bc7139" Nov 26 11:27:38 crc kubenswrapper[4622]: E1126 11:27:38.727103 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e381f265da20dd91c24199cc7523623a3c6dede208fb2d534b7ef0ae15bc7139\": container with ID starting with e381f265da20dd91c24199cc7523623a3c6dede208fb2d534b7ef0ae15bc7139 not found: ID does not exist" containerID="e381f265da20dd91c24199cc7523623a3c6dede208fb2d534b7ef0ae15bc7139" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.727148 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e381f265da20dd91c24199cc7523623a3c6dede208fb2d534b7ef0ae15bc7139"} err="failed to get container status \"e381f265da20dd91c24199cc7523623a3c6dede208fb2d534b7ef0ae15bc7139\": rpc error: code = NotFound desc = could not find container \"e381f265da20dd91c24199cc7523623a3c6dede208fb2d534b7ef0ae15bc7139\": container with ID starting with e381f265da20dd91c24199cc7523623a3c6dede208fb2d534b7ef0ae15bc7139 not found: ID does not exist" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.727178 4622 scope.go:117] "RemoveContainer" containerID="99782de7a4c346220d786092168978d474e5e74631c27b06c163036fedbd7a1a" Nov 26 11:27:38 crc kubenswrapper[4622]: E1126 11:27:38.733900 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99782de7a4c346220d786092168978d474e5e74631c27b06c163036fedbd7a1a\": container with ID starting with 99782de7a4c346220d786092168978d474e5e74631c27b06c163036fedbd7a1a not found: ID does not exist" containerID="99782de7a4c346220d786092168978d474e5e74631c27b06c163036fedbd7a1a" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.733945 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99782de7a4c346220d786092168978d474e5e74631c27b06c163036fedbd7a1a"} err="failed to get container status \"99782de7a4c346220d786092168978d474e5e74631c27b06c163036fedbd7a1a\": rpc error: code = NotFound desc = could not find container \"99782de7a4c346220d786092168978d474e5e74631c27b06c163036fedbd7a1a\": container with ID starting with 99782de7a4c346220d786092168978d474e5e74631c27b06c163036fedbd7a1a not found: ID does not exist" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.733969 4622 scope.go:117] "RemoveContainer" containerID="e90bc949be602b95e1c3ee07fd1b11bdf0057f94fe22c0dda15f07cc4669e178" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.735643 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.740229 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-78d76c8f88-kbpmq"] Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.757132 4622 scope.go:117] "RemoveContainer" containerID="bc47f8b951acea822fc5c10e6f256c3d25f4e90ed97d3b6634b7ecb8da848e7d" Nov 26 11:27:38 
crc kubenswrapper[4622]: I1126 11:27:38.760591 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-78d76c8f88-kbpmq"] Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.768444 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 26 11:27:38 crc kubenswrapper[4622]: E1126 11:27:38.768884 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" containerName="horizon-log" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.768906 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" containerName="horizon-log" Nov 26 11:27:38 crc kubenswrapper[4622]: E1126 11:27:38.768919 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52301b8d-a03d-4a3e-af50-bc7d80f9b925" containerName="cinder-scheduler" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.768927 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="52301b8d-a03d-4a3e-af50-bc7d80f9b925" containerName="cinder-scheduler" Nov 26 11:27:38 crc kubenswrapper[4622]: E1126 11:27:38.768939 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52301b8d-a03d-4a3e-af50-bc7d80f9b925" containerName="probe" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.768944 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="52301b8d-a03d-4a3e-af50-bc7d80f9b925" containerName="probe" Nov 26 11:27:38 crc kubenswrapper[4622]: E1126 11:27:38.768960 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" containerName="horizon" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.768965 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" containerName="horizon" Nov 26 11:27:38 crc kubenswrapper[4622]: E1126 11:27:38.768972 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4666c67e-2196-45f5-a34d-f6d08a51fd67" containerName="neutron-httpd" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.768977 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="4666c67e-2196-45f5-a34d-f6d08a51fd67" containerName="neutron-httpd" Nov 26 11:27:38 crc kubenswrapper[4622]: E1126 11:27:38.768984 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f5a5dad-9d9e-439d-9946-43da9af3caf2" containerName="init" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.768989 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f5a5dad-9d9e-439d-9946-43da9af3caf2" containerName="init" Nov 26 11:27:38 crc kubenswrapper[4622]: E1126 11:27:38.768998 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4666c67e-2196-45f5-a34d-f6d08a51fd67" containerName="neutron-api" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.769005 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="4666c67e-2196-45f5-a34d-f6d08a51fd67" containerName="neutron-api" Nov 26 11:27:38 crc kubenswrapper[4622]: E1126 11:27:38.769017 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" containerName="horizon" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.769022 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" containerName="horizon" Nov 26 11:27:38 crc kubenswrapper[4622]: E1126 11:27:38.769041 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" 
containerName="horizon-log" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.769046 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" containerName="horizon-log" Nov 26 11:27:38 crc kubenswrapper[4622]: E1126 11:27:38.769062 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f5a5dad-9d9e-439d-9946-43da9af3caf2" containerName="dnsmasq-dns" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.769067 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f5a5dad-9d9e-439d-9946-43da9af3caf2" containerName="dnsmasq-dns" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.769269 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="4666c67e-2196-45f5-a34d-f6d08a51fd67" containerName="neutron-httpd" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.769284 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f5a5dad-9d9e-439d-9946-43da9af3caf2" containerName="dnsmasq-dns" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.769292 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" containerName="horizon" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.769308 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ec3ddcf-99b7-4529-ab0a-ac52a138edf2" containerName="horizon-log" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.769316 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="52301b8d-a03d-4a3e-af50-bc7d80f9b925" containerName="probe" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.769325 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="52301b8d-a03d-4a3e-af50-bc7d80f9b925" containerName="cinder-scheduler" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.769331 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" containerName="horizon" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.769346 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7a6ec8b-bc72-4776-89c4-d68e367d4dc0" containerName="horizon-log" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.769354 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="4666c67e-2196-45f5-a34d-f6d08a51fd67" containerName="neutron-api" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.770300 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.772549 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.776709 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.776985 4622 scope.go:117] "RemoveContainer" containerID="e90bc949be602b95e1c3ee07fd1b11bdf0057f94fe22c0dda15f07cc4669e178" Nov 26 11:27:38 crc kubenswrapper[4622]: E1126 11:27:38.777373 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e90bc949be602b95e1c3ee07fd1b11bdf0057f94fe22c0dda15f07cc4669e178\": container with ID starting with e90bc949be602b95e1c3ee07fd1b11bdf0057f94fe22c0dda15f07cc4669e178 not found: ID does not exist" containerID="e90bc949be602b95e1c3ee07fd1b11bdf0057f94fe22c0dda15f07cc4669e178" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.777404 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e90bc949be602b95e1c3ee07fd1b11bdf0057f94fe22c0dda15f07cc4669e178"} err="failed to get container status \"e90bc949be602b95e1c3ee07fd1b11bdf0057f94fe22c0dda15f07cc4669e178\": rpc error: code = NotFound desc = could not find container \"e90bc949be602b95e1c3ee07fd1b11bdf0057f94fe22c0dda15f07cc4669e178\": container with ID starting with e90bc949be602b95e1c3ee07fd1b11bdf0057f94fe22c0dda15f07cc4669e178 not found: ID does not exist" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.777423 4622 scope.go:117] "RemoveContainer" containerID="bc47f8b951acea822fc5c10e6f256c3d25f4e90ed97d3b6634b7ecb8da848e7d" Nov 26 11:27:38 crc kubenswrapper[4622]: E1126 11:27:38.777704 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc47f8b951acea822fc5c10e6f256c3d25f4e90ed97d3b6634b7ecb8da848e7d\": container with ID starting with bc47f8b951acea822fc5c10e6f256c3d25f4e90ed97d3b6634b7ecb8da848e7d not found: ID does not exist" containerID="bc47f8b951acea822fc5c10e6f256c3d25f4e90ed97d3b6634b7ecb8da848e7d" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.777741 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc47f8b951acea822fc5c10e6f256c3d25f4e90ed97d3b6634b7ecb8da848e7d"} err="failed to get container status \"bc47f8b951acea822fc5c10e6f256c3d25f4e90ed97d3b6634b7ecb8da848e7d\": rpc error: code = NotFound desc = could not find container \"bc47f8b951acea822fc5c10e6f256c3d25f4e90ed97d3b6634b7ecb8da848e7d\": container with ID starting with bc47f8b951acea822fc5c10e6f256c3d25f4e90ed97d3b6634b7ecb8da848e7d not found: ID does not exist" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.930635 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a603a685-c7c9-4ce2-b5dc-d198f02049ed-scripts\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.931114 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nccmn\" (UniqueName: \"kubernetes.io/projected/a603a685-c7c9-4ce2-b5dc-d198f02049ed-kube-api-access-nccmn\") pod \"cinder-scheduler-0\" (UID: 
\"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.931153 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a603a685-c7c9-4ce2-b5dc-d198f02049ed-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.931223 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a603a685-c7c9-4ce2-b5dc-d198f02049ed-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.931396 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a603a685-c7c9-4ce2-b5dc-d198f02049ed-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:38 crc kubenswrapper[4622]: I1126 11:27:38.931493 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a603a685-c7c9-4ce2-b5dc-d198f02049ed-config-data\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.033722 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a603a685-c7c9-4ce2-b5dc-d198f02049ed-scripts\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.033788 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nccmn\" (UniqueName: \"kubernetes.io/projected/a603a685-c7c9-4ce2-b5dc-d198f02049ed-kube-api-access-nccmn\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.033819 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a603a685-c7c9-4ce2-b5dc-d198f02049ed-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.033873 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a603a685-c7c9-4ce2-b5dc-d198f02049ed-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.033901 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a603a685-c7c9-4ce2-b5dc-d198f02049ed-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.033925 4622 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a603a685-c7c9-4ce2-b5dc-d198f02049ed-config-data\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.033986 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a603a685-c7c9-4ce2-b5dc-d198f02049ed-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.039102 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a603a685-c7c9-4ce2-b5dc-d198f02049ed-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.039216 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a603a685-c7c9-4ce2-b5dc-d198f02049ed-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.039559 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a603a685-c7c9-4ce2-b5dc-d198f02049ed-config-data\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.039820 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a603a685-c7c9-4ce2-b5dc-d198f02049ed-scripts\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.050318 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nccmn\" (UniqueName: \"kubernetes.io/projected/a603a685-c7c9-4ce2-b5dc-d198f02049ed-kube-api-access-nccmn\") pod \"cinder-scheduler-0\" (UID: \"a603a685-c7c9-4ce2-b5dc-d198f02049ed\") " pod="openstack/cinder-scheduler-0" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.087837 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.497614 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 26 11:27:39 crc kubenswrapper[4622]: W1126 11:27:39.500142 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda603a685_c7c9_4ce2_b5dc_d198f02049ed.slice/crio-ddfac9410aff2fa1bdcf53c4250d4ada90e3086b4b67379f98e8ddce2959341b WatchSource:0}: Error finding container ddfac9410aff2fa1bdcf53c4250d4ada90e3086b4b67379f98e8ddce2959341b: Status 404 returned error can't find the container with id ddfac9410aff2fa1bdcf53c4250d4ada90e3086b4b67379f98e8ddce2959341b Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.630454 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-649b8ddf5b-rsqxp" podUID="20acddaa-a33d-4cb3-9a25-cc0f88aafca7" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.152:9311/healthcheck\": read tcp 10.217.0.2:59384->10.217.0.152:9311: read: connection reset by peer" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.630516 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-649b8ddf5b-rsqxp" podUID="20acddaa-a33d-4cb3-9a25-cc0f88aafca7" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.152:9311/healthcheck\": read tcp 10.217.0.2:59372->10.217.0.152:9311: read: connection reset by peer" Nov 26 11:27:39 crc kubenswrapper[4622]: I1126 11:27:39.680194 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a603a685-c7c9-4ce2-b5dc-d198f02049ed","Type":"ContainerStarted","Data":"ddfac9410aff2fa1bdcf53c4250d4ada90e3086b4b67379f98e8ddce2959341b"} Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.005493 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.165862 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-config-data-custom\") pod \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.166305 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-logs\") pod \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.166394 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-config-data\") pod \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.166468 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srjrl\" (UniqueName: \"kubernetes.io/projected/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-kube-api-access-srjrl\") pod \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.166673 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-combined-ca-bundle\") pod \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\" (UID: \"20acddaa-a33d-4cb3-9a25-cc0f88aafca7\") " Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.167532 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-logs" (OuterVolumeSpecName: "logs") pod "20acddaa-a33d-4cb3-9a25-cc0f88aafca7" (UID: "20acddaa-a33d-4cb3-9a25-cc0f88aafca7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.174632 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "20acddaa-a33d-4cb3-9a25-cc0f88aafca7" (UID: "20acddaa-a33d-4cb3-9a25-cc0f88aafca7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.174677 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-kube-api-access-srjrl" (OuterVolumeSpecName: "kube-api-access-srjrl") pod "20acddaa-a33d-4cb3-9a25-cc0f88aafca7" (UID: "20acddaa-a33d-4cb3-9a25-cc0f88aafca7"). InnerVolumeSpecName "kube-api-access-srjrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.190724 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "20acddaa-a33d-4cb3-9a25-cc0f88aafca7" (UID: "20acddaa-a33d-4cb3-9a25-cc0f88aafca7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.203754 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-config-data" (OuterVolumeSpecName: "config-data") pod "20acddaa-a33d-4cb3-9a25-cc0f88aafca7" (UID: "20acddaa-a33d-4cb3-9a25-cc0f88aafca7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.271380 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.271425 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srjrl\" (UniqueName: \"kubernetes.io/projected/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-kube-api-access-srjrl\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.271448 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.271460 4622 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.271472 4622 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20acddaa-a33d-4cb3-9a25-cc0f88aafca7-logs\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.693128 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a603a685-c7c9-4ce2-b5dc-d198f02049ed","Type":"ContainerStarted","Data":"b0cca67127fac53118bf670dc3708b55cddfa4017bce5224e1bee1d5922c3fef"} Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.693394 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a603a685-c7c9-4ce2-b5dc-d198f02049ed","Type":"ContainerStarted","Data":"97b3130a1002e43370b8f0b51d20de4fbd08a2dc9ff02e60de3f4a3143ea7eab"} Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.698020 4622 generic.go:334] "Generic (PLEG): container finished" podID="20acddaa-a33d-4cb3-9a25-cc0f88aafca7" containerID="77acbfee0613c78486fd98aab47cfb51bf09e3fa2e8285df63c81f27cfc81657" exitCode=0 Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.698129 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-649b8ddf5b-rsqxp" event={"ID":"20acddaa-a33d-4cb3-9a25-cc0f88aafca7","Type":"ContainerDied","Data":"77acbfee0613c78486fd98aab47cfb51bf09e3fa2e8285df63c81f27cfc81657"} Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.698277 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-649b8ddf5b-rsqxp" event={"ID":"20acddaa-a33d-4cb3-9a25-cc0f88aafca7","Type":"ContainerDied","Data":"50e27f65b451f1e2df4952c48edc1a35dc594ff956b5b0563657dbcf8cb01578"} Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.698309 4622 scope.go:117] "RemoveContainer" containerID="77acbfee0613c78486fd98aab47cfb51bf09e3fa2e8285df63c81f27cfc81657" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.698152 4622 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-649b8ddf5b-rsqxp" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.721602 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.721582882 podStartE2EDuration="2.721582882s" podCreationTimestamp="2025-11-26 11:27:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:27:40.710093012 +0000 UTC m=+1020.301304535" watchObservedRunningTime="2025-11-26 11:27:40.721582882 +0000 UTC m=+1020.312794404" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.727891 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4666c67e-2196-45f5-a34d-f6d08a51fd67" path="/var/lib/kubelet/pods/4666c67e-2196-45f5-a34d-f6d08a51fd67/volumes" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.728916 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52301b8d-a03d-4a3e-af50-bc7d80f9b925" path="/var/lib/kubelet/pods/52301b8d-a03d-4a3e-af50-bc7d80f9b925/volumes" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.729861 4622 scope.go:117] "RemoveContainer" containerID="bb0743b5a8fa9fdf0bd1a26da13b5bddb94d8f06cf3d722dde8cd4eb9053107e" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.748880 4622 scope.go:117] "RemoveContainer" containerID="77acbfee0613c78486fd98aab47cfb51bf09e3fa2e8285df63c81f27cfc81657" Nov 26 11:27:40 crc kubenswrapper[4622]: E1126 11:27:40.749607 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77acbfee0613c78486fd98aab47cfb51bf09e3fa2e8285df63c81f27cfc81657\": container with ID starting with 77acbfee0613c78486fd98aab47cfb51bf09e3fa2e8285df63c81f27cfc81657 not found: ID does not exist" containerID="77acbfee0613c78486fd98aab47cfb51bf09e3fa2e8285df63c81f27cfc81657" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.749644 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77acbfee0613c78486fd98aab47cfb51bf09e3fa2e8285df63c81f27cfc81657"} err="failed to get container status \"77acbfee0613c78486fd98aab47cfb51bf09e3fa2e8285df63c81f27cfc81657\": rpc error: code = NotFound desc = could not find container \"77acbfee0613c78486fd98aab47cfb51bf09e3fa2e8285df63c81f27cfc81657\": container with ID starting with 77acbfee0613c78486fd98aab47cfb51bf09e3fa2e8285df63c81f27cfc81657 not found: ID does not exist" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.749673 4622 scope.go:117] "RemoveContainer" containerID="bb0743b5a8fa9fdf0bd1a26da13b5bddb94d8f06cf3d722dde8cd4eb9053107e" Nov 26 11:27:40 crc kubenswrapper[4622]: E1126 11:27:40.751689 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb0743b5a8fa9fdf0bd1a26da13b5bddb94d8f06cf3d722dde8cd4eb9053107e\": container with ID starting with bb0743b5a8fa9fdf0bd1a26da13b5bddb94d8f06cf3d722dde8cd4eb9053107e not found: ID does not exist" containerID="bb0743b5a8fa9fdf0bd1a26da13b5bddb94d8f06cf3d722dde8cd4eb9053107e" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.751739 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb0743b5a8fa9fdf0bd1a26da13b5bddb94d8f06cf3d722dde8cd4eb9053107e"} err="failed to get container status 
\"bb0743b5a8fa9fdf0bd1a26da13b5bddb94d8f06cf3d722dde8cd4eb9053107e\": rpc error: code = NotFound desc = could not find container \"bb0743b5a8fa9fdf0bd1a26da13b5bddb94d8f06cf3d722dde8cd4eb9053107e\": container with ID starting with bb0743b5a8fa9fdf0bd1a26da13b5bddb94d8f06cf3d722dde8cd4eb9053107e not found: ID does not exist" Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.756513 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-649b8ddf5b-rsqxp"] Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.763586 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-649b8ddf5b-rsqxp"] Nov 26 11:27:40 crc kubenswrapper[4622]: I1126 11:27:40.974298 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-5b79cbd499-vlhtt" Nov 26 11:27:41 crc kubenswrapper[4622]: I1126 11:27:41.869545 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Nov 26 11:27:41 crc kubenswrapper[4622]: E1126 11:27:41.870229 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20acddaa-a33d-4cb3-9a25-cc0f88aafca7" containerName="barbican-api-log" Nov 26 11:27:41 crc kubenswrapper[4622]: I1126 11:27:41.870246 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="20acddaa-a33d-4cb3-9a25-cc0f88aafca7" containerName="barbican-api-log" Nov 26 11:27:41 crc kubenswrapper[4622]: E1126 11:27:41.870264 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20acddaa-a33d-4cb3-9a25-cc0f88aafca7" containerName="barbican-api" Nov 26 11:27:41 crc kubenswrapper[4622]: I1126 11:27:41.870270 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="20acddaa-a33d-4cb3-9a25-cc0f88aafca7" containerName="barbican-api" Nov 26 11:27:41 crc kubenswrapper[4622]: I1126 11:27:41.870527 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="20acddaa-a33d-4cb3-9a25-cc0f88aafca7" containerName="barbican-api" Nov 26 11:27:41 crc kubenswrapper[4622]: I1126 11:27:41.870544 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="20acddaa-a33d-4cb3-9a25-cc0f88aafca7" containerName="barbican-api-log" Nov 26 11:27:41 crc kubenswrapper[4622]: I1126 11:27:41.871096 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 26 11:27:41 crc kubenswrapper[4622]: I1126 11:27:41.873125 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-xztd2" Nov 26 11:27:41 crc kubenswrapper[4622]: I1126 11:27:41.874471 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Nov 26 11:27:41 crc kubenswrapper[4622]: I1126 11:27:41.878141 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 26 11:27:41 crc kubenswrapper[4622]: I1126 11:27:41.879964 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.005746 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/fdfff739-ae1f-43ce-ad8a-f3b6608b78af-openstack-config\") pod \"openstackclient\" (UID: \"fdfff739-ae1f-43ce-ad8a-f3b6608b78af\") " pod="openstack/openstackclient" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.005871 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/fdfff739-ae1f-43ce-ad8a-f3b6608b78af-openstack-config-secret\") pod \"openstackclient\" (UID: \"fdfff739-ae1f-43ce-ad8a-f3b6608b78af\") " pod="openstack/openstackclient" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.006117 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdfff739-ae1f-43ce-ad8a-f3b6608b78af-combined-ca-bundle\") pod \"openstackclient\" (UID: \"fdfff739-ae1f-43ce-ad8a-f3b6608b78af\") " pod="openstack/openstackclient" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.006177 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfdzb\" (UniqueName: \"kubernetes.io/projected/fdfff739-ae1f-43ce-ad8a-f3b6608b78af-kube-api-access-kfdzb\") pod \"openstackclient\" (UID: \"fdfff739-ae1f-43ce-ad8a-f3b6608b78af\") " pod="openstack/openstackclient" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.108264 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/fdfff739-ae1f-43ce-ad8a-f3b6608b78af-openstack-config\") pod \"openstackclient\" (UID: \"fdfff739-ae1f-43ce-ad8a-f3b6608b78af\") " pod="openstack/openstackclient" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.108544 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/fdfff739-ae1f-43ce-ad8a-f3b6608b78af-openstack-config-secret\") pod \"openstackclient\" (UID: \"fdfff739-ae1f-43ce-ad8a-f3b6608b78af\") " pod="openstack/openstackclient" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.108815 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdfff739-ae1f-43ce-ad8a-f3b6608b78af-combined-ca-bundle\") pod \"openstackclient\" (UID: \"fdfff739-ae1f-43ce-ad8a-f3b6608b78af\") " pod="openstack/openstackclient" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.108915 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-kfdzb\" (UniqueName: \"kubernetes.io/projected/fdfff739-ae1f-43ce-ad8a-f3b6608b78af-kube-api-access-kfdzb\") pod \"openstackclient\" (UID: \"fdfff739-ae1f-43ce-ad8a-f3b6608b78af\") " pod="openstack/openstackclient" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.109658 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/fdfff739-ae1f-43ce-ad8a-f3b6608b78af-openstack-config\") pod \"openstackclient\" (UID: \"fdfff739-ae1f-43ce-ad8a-f3b6608b78af\") " pod="openstack/openstackclient" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.114361 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/fdfff739-ae1f-43ce-ad8a-f3b6608b78af-openstack-config-secret\") pod \"openstackclient\" (UID: \"fdfff739-ae1f-43ce-ad8a-f3b6608b78af\") " pod="openstack/openstackclient" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.126343 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdfff739-ae1f-43ce-ad8a-f3b6608b78af-combined-ca-bundle\") pod \"openstackclient\" (UID: \"fdfff739-ae1f-43ce-ad8a-f3b6608b78af\") " pod="openstack/openstackclient" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.130060 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfdzb\" (UniqueName: \"kubernetes.io/projected/fdfff739-ae1f-43ce-ad8a-f3b6608b78af-kube-api-access-kfdzb\") pod \"openstackclient\" (UID: \"fdfff739-ae1f-43ce-ad8a-f3b6608b78af\") " pod="openstack/openstackclient" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.194673 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.611969 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.695260 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.750792 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20acddaa-a33d-4cb3-9a25-cc0f88aafca7" path="/var/lib/kubelet/pods/20acddaa-a33d-4cb3-9a25-cc0f88aafca7/volumes" Nov 26 11:27:42 crc kubenswrapper[4622]: I1126 11:27:42.771349 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"fdfff739-ae1f-43ce-ad8a-f3b6608b78af","Type":"ContainerStarted","Data":"bf3a545b836ae5637241f921e66baeba5465ecbe6e114a72c62fd07340a45100"} Nov 26 11:27:44 crc kubenswrapper[4622]: I1126 11:27:44.088354 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 26 11:27:46 crc kubenswrapper[4622]: I1126 11:27:46.948042 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:46 crc kubenswrapper[4622]: I1126 11:27:46.953263 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-bc8786d46-z584k" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.030684 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-686b8b87c4-gqg7z" podUID="85b663a5-e13e-4653-a4bb-340952a968c9" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.140:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.140:8443: connect: connection refused" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.177000 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-8jkdp"] Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.178336 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-8jkdp" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.186056 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-8jkdp"] Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.264486 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-2c73-account-create-update-294sk"] Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.266103 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-2c73-account-create-update-294sk" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.269631 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.280373 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.280711 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="ceilometer-central-agent" containerID="cri-o://32d7184bbad1b351dbdb44ca43874ea27f710aa23f520da7343b6dc77a7f558e" gracePeriod=30 Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.280974 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="proxy-httpd" containerID="cri-o://88f901acb298622545ba8c960955c5aac61d8319be5599a6439161233f45300a" gracePeriod=30 Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.281142 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="ceilometer-notification-agent" containerID="cri-o://8439cda5af9da9540d1a8b72894a00c826f2ff0a44ec5e884645dd2a3a2680c3" gracePeriod=30 Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.281213 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="sg-core" containerID="cri-o://25376bd2a468ef5a80d35102487302ecf83656db92f89bd944411bfc297c3fbd" gracePeriod=30 Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.285066 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.300804 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-2c73-account-create-update-294sk"] Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.361005 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84nrn\" (UniqueName: \"kubernetes.io/projected/456ff799-1844-44ae-ac17-fb35b70ca9a4-kube-api-access-84nrn\") pod \"nova-api-db-create-8jkdp\" (UID: \"456ff799-1844-44ae-ac17-fb35b70ca9a4\") " pod="openstack/nova-api-db-create-8jkdp" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.361204 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/456ff799-1844-44ae-ac17-fb35b70ca9a4-operator-scripts\") pod \"nova-api-db-create-8jkdp\" (UID: \"456ff799-1844-44ae-ac17-fb35b70ca9a4\") " pod="openstack/nova-api-db-create-8jkdp" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.361299 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fc21caf-ae0d-4608-9437-f5180cf104c5-operator-scripts\") pod \"nova-api-2c73-account-create-update-294sk\" (UID: \"1fc21caf-ae0d-4608-9437-f5180cf104c5\") " pod="openstack/nova-api-2c73-account-create-update-294sk" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.361394 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-zfpt6\" (UniqueName: \"kubernetes.io/projected/1fc21caf-ae0d-4608-9437-f5180cf104c5-kube-api-access-zfpt6\") pod \"nova-api-2c73-account-create-update-294sk\" (UID: \"1fc21caf-ae0d-4608-9437-f5180cf104c5\") " pod="openstack/nova-api-2c73-account-create-update-294sk" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.370578 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-4zjlx"] Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.372020 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-4zjlx" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.376122 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-4zjlx"] Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.467345 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a55622c-83da-4690-a510-1310eb081529-operator-scripts\") pod \"nova-cell0-db-create-4zjlx\" (UID: \"8a55622c-83da-4690-a510-1310eb081529\") " pod="openstack/nova-cell0-db-create-4zjlx" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.467417 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84nrn\" (UniqueName: \"kubernetes.io/projected/456ff799-1844-44ae-ac17-fb35b70ca9a4-kube-api-access-84nrn\") pod \"nova-api-db-create-8jkdp\" (UID: \"456ff799-1844-44ae-ac17-fb35b70ca9a4\") " pod="openstack/nova-api-db-create-8jkdp" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.467494 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb7f2\" (UniqueName: \"kubernetes.io/projected/8a55622c-83da-4690-a510-1310eb081529-kube-api-access-rb7f2\") pod \"nova-cell0-db-create-4zjlx\" (UID: \"8a55622c-83da-4690-a510-1310eb081529\") " pod="openstack/nova-cell0-db-create-4zjlx" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.467621 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/456ff799-1844-44ae-ac17-fb35b70ca9a4-operator-scripts\") pod \"nova-api-db-create-8jkdp\" (UID: \"456ff799-1844-44ae-ac17-fb35b70ca9a4\") " pod="openstack/nova-api-db-create-8jkdp" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.467704 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fc21caf-ae0d-4608-9437-f5180cf104c5-operator-scripts\") pod \"nova-api-2c73-account-create-update-294sk\" (UID: \"1fc21caf-ae0d-4608-9437-f5180cf104c5\") " pod="openstack/nova-api-2c73-account-create-update-294sk" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.467793 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfpt6\" (UniqueName: \"kubernetes.io/projected/1fc21caf-ae0d-4608-9437-f5180cf104c5-kube-api-access-zfpt6\") pod \"nova-api-2c73-account-create-update-294sk\" (UID: \"1fc21caf-ae0d-4608-9437-f5180cf104c5\") " pod="openstack/nova-api-2c73-account-create-update-294sk" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.468659 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fc21caf-ae0d-4608-9437-f5180cf104c5-operator-scripts\") pod \"nova-api-2c73-account-create-update-294sk\" (UID: 
\"1fc21caf-ae0d-4608-9437-f5180cf104c5\") " pod="openstack/nova-api-2c73-account-create-update-294sk" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.468751 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/456ff799-1844-44ae-ac17-fb35b70ca9a4-operator-scripts\") pod \"nova-api-db-create-8jkdp\" (UID: \"456ff799-1844-44ae-ac17-fb35b70ca9a4\") " pod="openstack/nova-api-db-create-8jkdp" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.469742 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-74vvh"] Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.470801 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-74vvh" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.478928 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-8ef7-account-create-update-xcz5c"] Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.480228 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-8ef7-account-create-update-xcz5c" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.483021 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.494029 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfpt6\" (UniqueName: \"kubernetes.io/projected/1fc21caf-ae0d-4608-9437-f5180cf104c5-kube-api-access-zfpt6\") pod \"nova-api-2c73-account-create-update-294sk\" (UID: \"1fc21caf-ae0d-4608-9437-f5180cf104c5\") " pod="openstack/nova-api-2c73-account-create-update-294sk" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.496734 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84nrn\" (UniqueName: \"kubernetes.io/projected/456ff799-1844-44ae-ac17-fb35b70ca9a4-kube-api-access-84nrn\") pod \"nova-api-db-create-8jkdp\" (UID: \"456ff799-1844-44ae-ac17-fb35b70ca9a4\") " pod="openstack/nova-api-db-create-8jkdp" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.501861 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-74vvh"] Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.513772 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-8ef7-account-create-update-xcz5c"] Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.515804 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-8jkdp" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.569767 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96413beb-fc9e-42bd-a4b1-0542d3d48944-operator-scripts\") pod \"nova-cell0-8ef7-account-create-update-xcz5c\" (UID: \"96413beb-fc9e-42bd-a4b1-0542d3d48944\") " pod="openstack/nova-cell0-8ef7-account-create-update-xcz5c" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.569849 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hr6kc\" (UniqueName: \"kubernetes.io/projected/96413beb-fc9e-42bd-a4b1-0542d3d48944-kube-api-access-hr6kc\") pod \"nova-cell0-8ef7-account-create-update-xcz5c\" (UID: \"96413beb-fc9e-42bd-a4b1-0542d3d48944\") " pod="openstack/nova-cell0-8ef7-account-create-update-xcz5c" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.569881 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a55622c-83da-4690-a510-1310eb081529-operator-scripts\") pod \"nova-cell0-db-create-4zjlx\" (UID: \"8a55622c-83da-4690-a510-1310eb081529\") " pod="openstack/nova-cell0-db-create-4zjlx" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.569921 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb7f2\" (UniqueName: \"kubernetes.io/projected/8a55622c-83da-4690-a510-1310eb081529-kube-api-access-rb7f2\") pod \"nova-cell0-db-create-4zjlx\" (UID: \"8a55622c-83da-4690-a510-1310eb081529\") " pod="openstack/nova-cell0-db-create-4zjlx" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.569960 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sn8c8\" (UniqueName: \"kubernetes.io/projected/bc5b6f9c-6f22-49fb-8506-3f7b25e7752f-kube-api-access-sn8c8\") pod \"nova-cell1-db-create-74vvh\" (UID: \"bc5b6f9c-6f22-49fb-8506-3f7b25e7752f\") " pod="openstack/nova-cell1-db-create-74vvh" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.569999 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc5b6f9c-6f22-49fb-8506-3f7b25e7752f-operator-scripts\") pod \"nova-cell1-db-create-74vvh\" (UID: \"bc5b6f9c-6f22-49fb-8506-3f7b25e7752f\") " pod="openstack/nova-cell1-db-create-74vvh" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.571033 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a55622c-83da-4690-a510-1310eb081529-operator-scripts\") pod \"nova-cell0-db-create-4zjlx\" (UID: \"8a55622c-83da-4690-a510-1310eb081529\") " pod="openstack/nova-cell0-db-create-4zjlx" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.584918 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb7f2\" (UniqueName: \"kubernetes.io/projected/8a55622c-83da-4690-a510-1310eb081529-kube-api-access-rb7f2\") pod \"nova-cell0-db-create-4zjlx\" (UID: \"8a55622c-83da-4690-a510-1310eb081529\") " pod="openstack/nova-cell0-db-create-4zjlx" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.585591 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-2c73-account-create-update-294sk" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.669547 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-0293-account-create-update-9dthn"] Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.670650 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-0293-account-create-update-9dthn" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.671532 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sn8c8\" (UniqueName: \"kubernetes.io/projected/bc5b6f9c-6f22-49fb-8506-3f7b25e7752f-kube-api-access-sn8c8\") pod \"nova-cell1-db-create-74vvh\" (UID: \"bc5b6f9c-6f22-49fb-8506-3f7b25e7752f\") " pod="openstack/nova-cell1-db-create-74vvh" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.671611 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc5b6f9c-6f22-49fb-8506-3f7b25e7752f-operator-scripts\") pod \"nova-cell1-db-create-74vvh\" (UID: \"bc5b6f9c-6f22-49fb-8506-3f7b25e7752f\") " pod="openstack/nova-cell1-db-create-74vvh" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.671714 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96413beb-fc9e-42bd-a4b1-0542d3d48944-operator-scripts\") pod \"nova-cell0-8ef7-account-create-update-xcz5c\" (UID: \"96413beb-fc9e-42bd-a4b1-0542d3d48944\") " pod="openstack/nova-cell0-8ef7-account-create-update-xcz5c" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.671807 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hr6kc\" (UniqueName: \"kubernetes.io/projected/96413beb-fc9e-42bd-a4b1-0542d3d48944-kube-api-access-hr6kc\") pod \"nova-cell0-8ef7-account-create-update-xcz5c\" (UID: \"96413beb-fc9e-42bd-a4b1-0542d3d48944\") " pod="openstack/nova-cell0-8ef7-account-create-update-xcz5c" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.673363 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.673539 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc5b6f9c-6f22-49fb-8506-3f7b25e7752f-operator-scripts\") pod \"nova-cell1-db-create-74vvh\" (UID: \"bc5b6f9c-6f22-49fb-8506-3f7b25e7752f\") " pod="openstack/nova-cell1-db-create-74vvh" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.673910 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96413beb-fc9e-42bd-a4b1-0542d3d48944-operator-scripts\") pod \"nova-cell0-8ef7-account-create-update-xcz5c\" (UID: \"96413beb-fc9e-42bd-a4b1-0542d3d48944\") " pod="openstack/nova-cell0-8ef7-account-create-update-xcz5c" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.685630 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hr6kc\" (UniqueName: \"kubernetes.io/projected/96413beb-fc9e-42bd-a4b1-0542d3d48944-kube-api-access-hr6kc\") pod \"nova-cell0-8ef7-account-create-update-xcz5c\" (UID: \"96413beb-fc9e-42bd-a4b1-0542d3d48944\") " pod="openstack/nova-cell0-8ef7-account-create-update-xcz5c" Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.687141 
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.691089 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sn8c8\" (UniqueName: \"kubernetes.io/projected/bc5b6f9c-6f22-49fb-8506-3f7b25e7752f-kube-api-access-sn8c8\") pod \"nova-cell1-db-create-74vvh\" (UID: \"bc5b6f9c-6f22-49fb-8506-3f7b25e7752f\") " pod="openstack/nova-cell1-db-create-74vvh"
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.691741 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-4zjlx"
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.773896 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqmgn\" (UniqueName: \"kubernetes.io/projected/d285966c-61fa-4be9-a16d-7aa8d0edcfe5-kube-api-access-fqmgn\") pod \"nova-cell1-0293-account-create-update-9dthn\" (UID: \"d285966c-61fa-4be9-a16d-7aa8d0edcfe5\") " pod="openstack/nova-cell1-0293-account-create-update-9dthn"
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.774030 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d285966c-61fa-4be9-a16d-7aa8d0edcfe5-operator-scripts\") pod \"nova-cell1-0293-account-create-update-9dthn\" (UID: \"d285966c-61fa-4be9-a16d-7aa8d0edcfe5\") " pod="openstack/nova-cell1-0293-account-create-update-9dthn"
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.822666 4622 generic.go:334] "Generic (PLEG): container finished" podID="6e996678-023a-45a2-b44e-a334186d0e2e" containerID="88f901acb298622545ba8c960955c5aac61d8319be5599a6439161233f45300a" exitCode=0
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.822702 4622 generic.go:334] "Generic (PLEG): container finished" podID="6e996678-023a-45a2-b44e-a334186d0e2e" containerID="25376bd2a468ef5a80d35102487302ecf83656db92f89bd944411bfc297c3fbd" exitCode=2
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.822710 4622 generic.go:334] "Generic (PLEG): container finished" podID="6e996678-023a-45a2-b44e-a334186d0e2e" containerID="32d7184bbad1b351dbdb44ca43874ea27f710aa23f520da7343b6dc77a7f558e" exitCode=0
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.822738 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e996678-023a-45a2-b44e-a334186d0e2e","Type":"ContainerDied","Data":"88f901acb298622545ba8c960955c5aac61d8319be5599a6439161233f45300a"}
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.822793 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e996678-023a-45a2-b44e-a334186d0e2e","Type":"ContainerDied","Data":"25376bd2a468ef5a80d35102487302ecf83656db92f89bd944411bfc297c3fbd"}
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.822803 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e996678-023a-45a2-b44e-a334186d0e2e","Type":"ContainerDied","Data":"32d7184bbad1b351dbdb44ca43874ea27f710aa23f520da7343b6dc77a7f558e"}
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.855367 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-74vvh"
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.871028 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-8ef7-account-create-update-xcz5c"
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.876653 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqmgn\" (UniqueName: \"kubernetes.io/projected/d285966c-61fa-4be9-a16d-7aa8d0edcfe5-kube-api-access-fqmgn\") pod \"nova-cell1-0293-account-create-update-9dthn\" (UID: \"d285966c-61fa-4be9-a16d-7aa8d0edcfe5\") " pod="openstack/nova-cell1-0293-account-create-update-9dthn"
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.876761 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d285966c-61fa-4be9-a16d-7aa8d0edcfe5-operator-scripts\") pod \"nova-cell1-0293-account-create-update-9dthn\" (UID: \"d285966c-61fa-4be9-a16d-7aa8d0edcfe5\") " pod="openstack/nova-cell1-0293-account-create-update-9dthn"
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.877864 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d285966c-61fa-4be9-a16d-7aa8d0edcfe5-operator-scripts\") pod \"nova-cell1-0293-account-create-update-9dthn\" (UID: \"d285966c-61fa-4be9-a16d-7aa8d0edcfe5\") " pod="openstack/nova-cell1-0293-account-create-update-9dthn"
Nov 26 11:27:48 crc kubenswrapper[4622]: I1126 11:27:48.892039 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqmgn\" (UniqueName: \"kubernetes.io/projected/d285966c-61fa-4be9-a16d-7aa8d0edcfe5-kube-api-access-fqmgn\") pod \"nova-cell1-0293-account-create-update-9dthn\" (UID: \"d285966c-61fa-4be9-a16d-7aa8d0edcfe5\") " pod="openstack/nova-cell1-0293-account-create-update-9dthn"
Nov 26 11:27:49 crc kubenswrapper[4622]: I1126 11:27:49.051975 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-0293-account-create-update-9dthn"
Nov 26 11:27:49 crc kubenswrapper[4622]: I1126 11:27:49.275243 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Nov 26 11:27:49 crc kubenswrapper[4622]: I1126 11:27:49.843255 4622 generic.go:334] "Generic (PLEG): container finished" podID="6e996678-023a-45a2-b44e-a334186d0e2e" containerID="8439cda5af9da9540d1a8b72894a00c826f2ff0a44ec5e884645dd2a3a2680c3" exitCode=0
Nov 26 11:27:49 crc kubenswrapper[4622]: I1126 11:27:49.843467 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e996678-023a-45a2-b44e-a334186d0e2e","Type":"ContainerDied","Data":"8439cda5af9da9540d1a8b72894a00c826f2ff0a44ec5e884645dd2a3a2680c3"}
Nov 26 11:27:50 crc kubenswrapper[4622]: I1126 11:27:50.973554 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.119517 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e996678-023a-45a2-b44e-a334186d0e2e-run-httpd\") pod \"6e996678-023a-45a2-b44e-a334186d0e2e\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.119887 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-config-data\") pod \"6e996678-023a-45a2-b44e-a334186d0e2e\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.119907 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-scripts\") pod \"6e996678-023a-45a2-b44e-a334186d0e2e\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.119936 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-combined-ca-bundle\") pod \"6e996678-023a-45a2-b44e-a334186d0e2e\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.119958 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-sg-core-conf-yaml\") pod \"6e996678-023a-45a2-b44e-a334186d0e2e\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.119993 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgps2\" (UniqueName: \"kubernetes.io/projected/6e996678-023a-45a2-b44e-a334186d0e2e-kube-api-access-zgps2\") pod \"6e996678-023a-45a2-b44e-a334186d0e2e\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.120118 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e996678-023a-45a2-b44e-a334186d0e2e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6e996678-023a-45a2-b44e-a334186d0e2e" (UID: "6e996678-023a-45a2-b44e-a334186d0e2e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.120145 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e996678-023a-45a2-b44e-a334186d0e2e-log-httpd\") pod \"6e996678-023a-45a2-b44e-a334186d0e2e\" (UID: \"6e996678-023a-45a2-b44e-a334186d0e2e\") " Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.120857 4622 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e996678-023a-45a2-b44e-a334186d0e2e-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.121389 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e996678-023a-45a2-b44e-a334186d0e2e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6e996678-023a-45a2-b44e-a334186d0e2e" (UID: "6e996678-023a-45a2-b44e-a334186d0e2e"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.126560 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-scripts" (OuterVolumeSpecName: "scripts") pod "6e996678-023a-45a2-b44e-a334186d0e2e" (UID: "6e996678-023a-45a2-b44e-a334186d0e2e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.126869 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e996678-023a-45a2-b44e-a334186d0e2e-kube-api-access-zgps2" (OuterVolumeSpecName: "kube-api-access-zgps2") pod "6e996678-023a-45a2-b44e-a334186d0e2e" (UID: "6e996678-023a-45a2-b44e-a334186d0e2e"). InnerVolumeSpecName "kube-api-access-zgps2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.145146 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6e996678-023a-45a2-b44e-a334186d0e2e" (UID: "6e996678-023a-45a2-b44e-a334186d0e2e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.186879 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6e996678-023a-45a2-b44e-a334186d0e2e" (UID: "6e996678-023a-45a2-b44e-a334186d0e2e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.215424 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-config-data" (OuterVolumeSpecName: "config-data") pod "6e996678-023a-45a2-b44e-a334186d0e2e" (UID: "6e996678-023a-45a2-b44e-a334186d0e2e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.224148 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.224347 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.224395 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.224409 4622 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6e996678-023a-45a2-b44e-a334186d0e2e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.224418 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgps2\" (UniqueName: \"kubernetes.io/projected/6e996678-023a-45a2-b44e-a334186d0e2e-kube-api-access-zgps2\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.224431 4622 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e996678-023a-45a2-b44e-a334186d0e2e-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.292880 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-74vvh"] Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.316078 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-0293-account-create-update-9dthn"] Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.324943 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-4zjlx"] Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.413676 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-2c73-account-create-update-294sk"] Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.482276 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-8jkdp"] Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.489667 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-8ef7-account-create-update-xcz5c"] Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.902401 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-8ef7-account-create-update-xcz5c" event={"ID":"96413beb-fc9e-42bd-a4b1-0542d3d48944","Type":"ContainerStarted","Data":"eea1e0374644d6de4c3a59ccb7d3186d3003d87a105f590c736a3a6567020847"} Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.905729 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e996678-023a-45a2-b44e-a334186d0e2e","Type":"ContainerDied","Data":"530e717935b95a142cf8f6e8aab4130e0c602ddd86d3688154bc5b66083f9be5"} Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.905788 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.905831 4622 scope.go:117] "RemoveContainer" containerID="88f901acb298622545ba8c960955c5aac61d8319be5599a6439161233f45300a" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.909322 4622 generic.go:334] "Generic (PLEG): container finished" podID="d285966c-61fa-4be9-a16d-7aa8d0edcfe5" containerID="19f1d26598fac07827e23ed03ca583478dcb031f0984397d3c905ffdb3ae98b9" exitCode=0 Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.909395 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-0293-account-create-update-9dthn" event={"ID":"d285966c-61fa-4be9-a16d-7aa8d0edcfe5","Type":"ContainerDied","Data":"19f1d26598fac07827e23ed03ca583478dcb031f0984397d3c905ffdb3ae98b9"} Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.909413 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-0293-account-create-update-9dthn" event={"ID":"d285966c-61fa-4be9-a16d-7aa8d0edcfe5","Type":"ContainerStarted","Data":"6c84df849eff252b3c4096e9c9b4f2f6ef688e69a0ad85ac765e334be1ebad71"} Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.910858 4622 generic.go:334] "Generic (PLEG): container finished" podID="1fc21caf-ae0d-4608-9437-f5180cf104c5" containerID="bd610e581b6879364acebf98dabadd3a9b2ef269d025c654ba7b2eb9da4b79c7" exitCode=0 Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.910972 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2c73-account-create-update-294sk" event={"ID":"1fc21caf-ae0d-4608-9437-f5180cf104c5","Type":"ContainerDied","Data":"bd610e581b6879364acebf98dabadd3a9b2ef269d025c654ba7b2eb9da4b79c7"} Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.911056 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2c73-account-create-update-294sk" event={"ID":"1fc21caf-ae0d-4608-9437-f5180cf104c5","Type":"ContainerStarted","Data":"21711b406ffd2da329a1105e0d2b9700d5e56e0e588822b280391f113c38e24c"} Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.912205 4622 generic.go:334] "Generic (PLEG): container finished" podID="bc5b6f9c-6f22-49fb-8506-3f7b25e7752f" containerID="5dd883989a13cea719f72bafd4f4b35a117827968850a9bf94190c8b3d23b0b2" exitCode=0 Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.912289 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-74vvh" event={"ID":"bc5b6f9c-6f22-49fb-8506-3f7b25e7752f","Type":"ContainerDied","Data":"5dd883989a13cea719f72bafd4f4b35a117827968850a9bf94190c8b3d23b0b2"} Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.912325 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-74vvh" event={"ID":"bc5b6f9c-6f22-49fb-8506-3f7b25e7752f","Type":"ContainerStarted","Data":"c9dbebe77f575c68eacf5f54a77747893e80ea7070e315dc63414e9d4162a300"} Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.916871 4622 generic.go:334] "Generic (PLEG): container finished" podID="8a55622c-83da-4690-a510-1310eb081529" containerID="f9abdcd9f52fac81d8fed4870fae13a5b73725c3fb4ff0d07108b2da56827511" exitCode=0 Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.916934 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-4zjlx" event={"ID":"8a55622c-83da-4690-a510-1310eb081529","Type":"ContainerDied","Data":"f9abdcd9f52fac81d8fed4870fae13a5b73725c3fb4ff0d07108b2da56827511"} Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 
11:27:51.916958 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-4zjlx" event={"ID":"8a55622c-83da-4690-a510-1310eb081529","Type":"ContainerStarted","Data":"fb9ab4c92258fd0687f2033a20247b59f38b50f6019528a369b7e710375547a3"} Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.919790 4622 generic.go:334] "Generic (PLEG): container finished" podID="456ff799-1844-44ae-ac17-fb35b70ca9a4" containerID="193dd062fac70c2c1ea0d077fdcea4a1c690d39158d575f3f22b0c979dd5a3a4" exitCode=0 Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.919850 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8jkdp" event={"ID":"456ff799-1844-44ae-ac17-fb35b70ca9a4","Type":"ContainerDied","Data":"193dd062fac70c2c1ea0d077fdcea4a1c690d39158d575f3f22b0c979dd5a3a4"} Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.919866 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8jkdp" event={"ID":"456ff799-1844-44ae-ac17-fb35b70ca9a4","Type":"ContainerStarted","Data":"af00f5a509741ed999b8324b78964234a866da1bae67ce3b8a1d7039bc1cd2d9"} Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.940047 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"fdfff739-ae1f-43ce-ad8a-f3b6608b78af","Type":"ContainerStarted","Data":"d2e7e1e2fd5720d41bc3ec6a8066570a39c86edb7817af332e582dc22dc9d789"} Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.958905 4622 scope.go:117] "RemoveContainer" containerID="25376bd2a468ef5a80d35102487302ecf83656db92f89bd944411bfc297c3fbd" Nov 26 11:27:51 crc kubenswrapper[4622]: I1126 11:27:51.987329 4622 scope.go:117] "RemoveContainer" containerID="8439cda5af9da9540d1a8b72894a00c826f2ff0a44ec5e884645dd2a3a2680c3" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.048197 4622 scope.go:117] "RemoveContainer" containerID="32d7184bbad1b351dbdb44ca43874ea27f710aa23f520da7343b6dc77a7f558e" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.056317 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.062334 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.063133 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.045050444 podStartE2EDuration="11.06312008s" podCreationTimestamp="2025-11-26 11:27:41 +0000 UTC" firstStartedPulling="2025-11-26 11:27:42.712685274 +0000 UTC m=+1022.303896796" lastFinishedPulling="2025-11-26 11:27:50.73075491 +0000 UTC m=+1030.321966432" observedRunningTime="2025-11-26 11:27:52.036894185 +0000 UTC m=+1031.628105718" watchObservedRunningTime="2025-11-26 11:27:52.06312008 +0000 UTC m=+1031.654331601" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.076673 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:52 crc kubenswrapper[4622]: E1126 11:27:52.077179 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="ceilometer-central-agent" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.077253 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="ceilometer-central-agent" Nov 26 11:27:52 crc kubenswrapper[4622]: E1126 11:27:52.077320 4622 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="proxy-httpd" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.077365 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="proxy-httpd" Nov 26 11:27:52 crc kubenswrapper[4622]: E1126 11:27:52.077439 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="sg-core" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.077490 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="sg-core" Nov 26 11:27:52 crc kubenswrapper[4622]: E1126 11:27:52.077560 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="ceilometer-notification-agent" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.077603 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="ceilometer-notification-agent" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.077850 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="proxy-httpd" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.077915 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="sg-core" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.077970 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="ceilometer-central-agent" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.078025 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" containerName="ceilometer-notification-agent" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.079516 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.082047 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.082374 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.082687 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.240988 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc828b01-beae-4d81-9210-daa43817167b-run-httpd\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.241067 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7vnz\" (UniqueName: \"kubernetes.io/projected/dc828b01-beae-4d81-9210-daa43817167b-kube-api-access-s7vnz\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.241170 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-config-data\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.241276 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.241300 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-scripts\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.241332 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.241389 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc828b01-beae-4d81-9210-daa43817167b-log-httpd\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.342914 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 
11:27:52.343183 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-scripts\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.343258 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.343320 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc828b01-beae-4d81-9210-daa43817167b-log-httpd\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.343366 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc828b01-beae-4d81-9210-daa43817167b-run-httpd\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.343447 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7vnz\" (UniqueName: \"kubernetes.io/projected/dc828b01-beae-4d81-9210-daa43817167b-kube-api-access-s7vnz\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.343473 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-config-data\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.344257 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc828b01-beae-4d81-9210-daa43817167b-log-httpd\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.344270 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc828b01-beae-4d81-9210-daa43817167b-run-httpd\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.350111 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-scripts\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.350901 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.358640 4622 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-config-data\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.358787 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.361824 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7vnz\" (UniqueName: \"kubernetes.io/projected/dc828b01-beae-4d81-9210-daa43817167b-kube-api-access-s7vnz\") pod \"ceilometer-0\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.392375 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.716675 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e996678-023a-45a2-b44e-a334186d0e2e" path="/var/lib/kubelet/pods/6e996678-023a-45a2-b44e-a334186d0e2e/volumes" Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.806293 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.858463 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.950049 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc828b01-beae-4d81-9210-daa43817167b","Type":"ContainerStarted","Data":"db30bbf005412c12c0bf6a364cb2af7d69cbabc39b43eae25abd228b4c4113f7"} Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.952301 4622 generic.go:334] "Generic (PLEG): container finished" podID="96413beb-fc9e-42bd-a4b1-0542d3d48944" containerID="c0548d4e21e88578a05cd7fced3dbaa84506f065eca40e3c12995178172fe0e2" exitCode=0 Nov 26 11:27:52 crc kubenswrapper[4622]: I1126 11:27:52.952776 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-8ef7-account-create-update-xcz5c" event={"ID":"96413beb-fc9e-42bd-a4b1-0542d3d48944","Type":"ContainerDied","Data":"c0548d4e21e88578a05cd7fced3dbaa84506f065eca40e3c12995178172fe0e2"} Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.281284 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-8jkdp" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.421012 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-0293-account-create-update-9dthn" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.428006 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-74vvh" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.455561 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-2c73-account-create-update-294sk" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.476280 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqmgn\" (UniqueName: \"kubernetes.io/projected/d285966c-61fa-4be9-a16d-7aa8d0edcfe5-kube-api-access-fqmgn\") pod \"d285966c-61fa-4be9-a16d-7aa8d0edcfe5\" (UID: \"d285966c-61fa-4be9-a16d-7aa8d0edcfe5\") " Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.476363 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc5b6f9c-6f22-49fb-8506-3f7b25e7752f-operator-scripts\") pod \"bc5b6f9c-6f22-49fb-8506-3f7b25e7752f\" (UID: \"bc5b6f9c-6f22-49fb-8506-3f7b25e7752f\") " Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.476457 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/456ff799-1844-44ae-ac17-fb35b70ca9a4-operator-scripts\") pod \"456ff799-1844-44ae-ac17-fb35b70ca9a4\" (UID: \"456ff799-1844-44ae-ac17-fb35b70ca9a4\") " Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.476462 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-4zjlx" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.476607 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sn8c8\" (UniqueName: \"kubernetes.io/projected/bc5b6f9c-6f22-49fb-8506-3f7b25e7752f-kube-api-access-sn8c8\") pod \"bc5b6f9c-6f22-49fb-8506-3f7b25e7752f\" (UID: \"bc5b6f9c-6f22-49fb-8506-3f7b25e7752f\") " Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.476721 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84nrn\" (UniqueName: \"kubernetes.io/projected/456ff799-1844-44ae-ac17-fb35b70ca9a4-kube-api-access-84nrn\") pod \"456ff799-1844-44ae-ac17-fb35b70ca9a4\" (UID: \"456ff799-1844-44ae-ac17-fb35b70ca9a4\") " Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.476804 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d285966c-61fa-4be9-a16d-7aa8d0edcfe5-operator-scripts\") pod \"d285966c-61fa-4be9-a16d-7aa8d0edcfe5\" (UID: \"d285966c-61fa-4be9-a16d-7aa8d0edcfe5\") " Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.476857 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfpt6\" (UniqueName: \"kubernetes.io/projected/1fc21caf-ae0d-4608-9437-f5180cf104c5-kube-api-access-zfpt6\") pod \"1fc21caf-ae0d-4608-9437-f5180cf104c5\" (UID: \"1fc21caf-ae0d-4608-9437-f5180cf104c5\") " Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.476899 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fc21caf-ae0d-4608-9437-f5180cf104c5-operator-scripts\") pod \"1fc21caf-ae0d-4608-9437-f5180cf104c5\" (UID: \"1fc21caf-ae0d-4608-9437-f5180cf104c5\") " Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.477285 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/456ff799-1844-44ae-ac17-fb35b70ca9a4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "456ff799-1844-44ae-ac17-fb35b70ca9a4" (UID: "456ff799-1844-44ae-ac17-fb35b70ca9a4"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.477629 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/456ff799-1844-44ae-ac17-fb35b70ca9a4-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.478799 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d285966c-61fa-4be9-a16d-7aa8d0edcfe5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d285966c-61fa-4be9-a16d-7aa8d0edcfe5" (UID: "d285966c-61fa-4be9-a16d-7aa8d0edcfe5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.479555 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc5b6f9c-6f22-49fb-8506-3f7b25e7752f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bc5b6f9c-6f22-49fb-8506-3f7b25e7752f" (UID: "bc5b6f9c-6f22-49fb-8506-3f7b25e7752f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.481477 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d285966c-61fa-4be9-a16d-7aa8d0edcfe5-kube-api-access-fqmgn" (OuterVolumeSpecName: "kube-api-access-fqmgn") pod "d285966c-61fa-4be9-a16d-7aa8d0edcfe5" (UID: "d285966c-61fa-4be9-a16d-7aa8d0edcfe5"). InnerVolumeSpecName "kube-api-access-fqmgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.481720 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5b6f9c-6f22-49fb-8506-3f7b25e7752f-kube-api-access-sn8c8" (OuterVolumeSpecName: "kube-api-access-sn8c8") pod "bc5b6f9c-6f22-49fb-8506-3f7b25e7752f" (UID: "bc5b6f9c-6f22-49fb-8506-3f7b25e7752f"). InnerVolumeSpecName "kube-api-access-sn8c8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.483895 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fc21caf-ae0d-4608-9437-f5180cf104c5-kube-api-access-zfpt6" (OuterVolumeSpecName: "kube-api-access-zfpt6") pod "1fc21caf-ae0d-4608-9437-f5180cf104c5" (UID: "1fc21caf-ae0d-4608-9437-f5180cf104c5"). InnerVolumeSpecName "kube-api-access-zfpt6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.484843 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/456ff799-1844-44ae-ac17-fb35b70ca9a4-kube-api-access-84nrn" (OuterVolumeSpecName: "kube-api-access-84nrn") pod "456ff799-1844-44ae-ac17-fb35b70ca9a4" (UID: "456ff799-1844-44ae-ac17-fb35b70ca9a4"). InnerVolumeSpecName "kube-api-access-84nrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.486969 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fc21caf-ae0d-4608-9437-f5180cf104c5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1fc21caf-ae0d-4608-9437-f5180cf104c5" (UID: "1fc21caf-ae0d-4608-9437-f5180cf104c5"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.579029 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a55622c-83da-4690-a510-1310eb081529-operator-scripts\") pod \"8a55622c-83da-4690-a510-1310eb081529\" (UID: \"8a55622c-83da-4690-a510-1310eb081529\") " Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.579175 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rb7f2\" (UniqueName: \"kubernetes.io/projected/8a55622c-83da-4690-a510-1310eb081529-kube-api-access-rb7f2\") pod \"8a55622c-83da-4690-a510-1310eb081529\" (UID: \"8a55622c-83da-4690-a510-1310eb081529\") " Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.579636 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a55622c-83da-4690-a510-1310eb081529-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8a55622c-83da-4690-a510-1310eb081529" (UID: "8a55622c-83da-4690-a510-1310eb081529"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.579929 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84nrn\" (UniqueName: \"kubernetes.io/projected/456ff799-1844-44ae-ac17-fb35b70ca9a4-kube-api-access-84nrn\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.579950 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d285966c-61fa-4be9-a16d-7aa8d0edcfe5-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.579960 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfpt6\" (UniqueName: \"kubernetes.io/projected/1fc21caf-ae0d-4608-9437-f5180cf104c5-kube-api-access-zfpt6\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.579970 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fc21caf-ae0d-4608-9437-f5180cf104c5-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.579979 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqmgn\" (UniqueName: \"kubernetes.io/projected/d285966c-61fa-4be9-a16d-7aa8d0edcfe5-kube-api-access-fqmgn\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.579987 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc5b6f9c-6f22-49fb-8506-3f7b25e7752f-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.579995 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a55622c-83da-4690-a510-1310eb081529-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.580003 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sn8c8\" (UniqueName: \"kubernetes.io/projected/bc5b6f9c-6f22-49fb-8506-3f7b25e7752f-kube-api-access-sn8c8\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.582276 4622 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a55622c-83da-4690-a510-1310eb081529-kube-api-access-rb7f2" (OuterVolumeSpecName: "kube-api-access-rb7f2") pod "8a55622c-83da-4690-a510-1310eb081529" (UID: "8a55622c-83da-4690-a510-1310eb081529"). InnerVolumeSpecName "kube-api-access-rb7f2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.681769 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rb7f2\" (UniqueName: \"kubernetes.io/projected/8a55622c-83da-4690-a510-1310eb081529-kube-api-access-rb7f2\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.972161 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-0293-account-create-update-9dthn" event={"ID":"d285966c-61fa-4be9-a16d-7aa8d0edcfe5","Type":"ContainerDied","Data":"6c84df849eff252b3c4096e9c9b4f2f6ef688e69a0ad85ac765e334be1ebad71"} Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.972638 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c84df849eff252b3c4096e9c9b4f2f6ef688e69a0ad85ac765e334be1ebad71" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.972720 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-0293-account-create-update-9dthn" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.980840 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2c73-account-create-update-294sk" event={"ID":"1fc21caf-ae0d-4608-9437-f5180cf104c5","Type":"ContainerDied","Data":"21711b406ffd2da329a1105e0d2b9700d5e56e0e588822b280391f113c38e24c"} Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.980875 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2c73-account-create-update-294sk" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.980911 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21711b406ffd2da329a1105e0d2b9700d5e56e0e588822b280391f113c38e24c" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.983530 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-74vvh" event={"ID":"bc5b6f9c-6f22-49fb-8506-3f7b25e7752f","Type":"ContainerDied","Data":"c9dbebe77f575c68eacf5f54a77747893e80ea7070e315dc63414e9d4162a300"} Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.983564 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9dbebe77f575c68eacf5f54a77747893e80ea7070e315dc63414e9d4162a300" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.983585 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-74vvh" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.986981 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-4zjlx" event={"ID":"8a55622c-83da-4690-a510-1310eb081529","Type":"ContainerDied","Data":"fb9ab4c92258fd0687f2033a20247b59f38b50f6019528a369b7e710375547a3"} Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.987002 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb9ab4c92258fd0687f2033a20247b59f38b50f6019528a369b7e710375547a3" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.987006 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-4zjlx" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.997480 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc828b01-beae-4d81-9210-daa43817167b","Type":"ContainerStarted","Data":"9bba6fc744d71100dadfd4373e8d75e55bef4e12477564730eb9dad6ec0c9cea"} Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.999001 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-8jkdp" Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.999011 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8jkdp" event={"ID":"456ff799-1844-44ae-ac17-fb35b70ca9a4","Type":"ContainerDied","Data":"af00f5a509741ed999b8324b78964234a866da1bae67ce3b8a1d7039bc1cd2d9"} Nov 26 11:27:53 crc kubenswrapper[4622]: I1126 11:27:53.999216 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af00f5a509741ed999b8324b78964234a866da1bae67ce3b8a1d7039bc1cd2d9" Nov 26 11:27:54 crc kubenswrapper[4622]: I1126 11:27:54.280238 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-8ef7-account-create-update-xcz5c" Nov 26 11:27:54 crc kubenswrapper[4622]: I1126 11:27:54.290016 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96413beb-fc9e-42bd-a4b1-0542d3d48944-operator-scripts\") pod \"96413beb-fc9e-42bd-a4b1-0542d3d48944\" (UID: \"96413beb-fc9e-42bd-a4b1-0542d3d48944\") " Nov 26 11:27:54 crc kubenswrapper[4622]: I1126 11:27:54.290096 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hr6kc\" (UniqueName: \"kubernetes.io/projected/96413beb-fc9e-42bd-a4b1-0542d3d48944-kube-api-access-hr6kc\") pod \"96413beb-fc9e-42bd-a4b1-0542d3d48944\" (UID: \"96413beb-fc9e-42bd-a4b1-0542d3d48944\") " Nov 26 11:27:54 crc kubenswrapper[4622]: I1126 11:27:54.290614 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96413beb-fc9e-42bd-a4b1-0542d3d48944-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "96413beb-fc9e-42bd-a4b1-0542d3d48944" (UID: "96413beb-fc9e-42bd-a4b1-0542d3d48944"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:27:54 crc kubenswrapper[4622]: I1126 11:27:54.290871 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96413beb-fc9e-42bd-a4b1-0542d3d48944-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:54 crc kubenswrapper[4622]: I1126 11:27:54.295249 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96413beb-fc9e-42bd-a4b1-0542d3d48944-kube-api-access-hr6kc" (OuterVolumeSpecName: "kube-api-access-hr6kc") pod "96413beb-fc9e-42bd-a4b1-0542d3d48944" (UID: "96413beb-fc9e-42bd-a4b1-0542d3d48944"). InnerVolumeSpecName "kube-api-access-hr6kc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:54 crc kubenswrapper[4622]: I1126 11:27:54.393476 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hr6kc\" (UniqueName: \"kubernetes.io/projected/96413beb-fc9e-42bd-a4b1-0542d3d48944-kube-api-access-hr6kc\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:55 crc kubenswrapper[4622]: I1126 11:27:55.008876 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc828b01-beae-4d81-9210-daa43817167b","Type":"ContainerStarted","Data":"200ecde126650b05251a4a5a0e12fa031c53946462469848b2eadd26db881484"} Nov 26 11:27:55 crc kubenswrapper[4622]: I1126 11:27:55.012374 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-8ef7-account-create-update-xcz5c" event={"ID":"96413beb-fc9e-42bd-a4b1-0542d3d48944","Type":"ContainerDied","Data":"eea1e0374644d6de4c3a59ccb7d3186d3003d87a105f590c736a3a6567020847"} Nov 26 11:27:55 crc kubenswrapper[4622]: I1126 11:27:55.012412 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eea1e0374644d6de4c3a59ccb7d3186d3003d87a105f590c736a3a6567020847" Nov 26 11:27:55 crc kubenswrapper[4622]: I1126 11:27:55.012477 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-8ef7-account-create-update-xcz5c" Nov 26 11:27:56 crc kubenswrapper[4622]: I1126 11:27:56.022551 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc828b01-beae-4d81-9210-daa43817167b","Type":"ContainerStarted","Data":"5df42f313bca1c14dc2046eaa1d7a5c3cbe555b710a895baaa6eb93b32b1e0c5"} Nov 26 11:27:57 crc kubenswrapper[4622]: I1126 11:27:57.033545 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc828b01-beae-4d81-9210-daa43817167b","Type":"ContainerStarted","Data":"5069597a41d9fd00ea2ba67cd5df67ba74eebab2c8b7cac0621f56e1fb96c01c"} Nov 26 11:27:57 crc kubenswrapper[4622]: I1126 11:27:57.034779 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 26 11:27:57 crc kubenswrapper[4622]: I1126 11:27:57.033660 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="ceilometer-central-agent" containerID="cri-o://9bba6fc744d71100dadfd4373e8d75e55bef4e12477564730eb9dad6ec0c9cea" gracePeriod=30 Nov 26 11:27:57 crc kubenswrapper[4622]: I1126 11:27:57.033780 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="sg-core" containerID="cri-o://5df42f313bca1c14dc2046eaa1d7a5c3cbe555b710a895baaa6eb93b32b1e0c5" gracePeriod=30 Nov 26 11:27:57 crc kubenswrapper[4622]: I1126 11:27:57.033787 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="proxy-httpd" containerID="cri-o://5069597a41d9fd00ea2ba67cd5df67ba74eebab2c8b7cac0621f56e1fb96c01c" gracePeriod=30 Nov 26 11:27:57 crc kubenswrapper[4622]: I1126 11:27:57.033768 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="ceilometer-notification-agent" containerID="cri-o://200ecde126650b05251a4a5a0e12fa031c53946462469848b2eadd26db881484" gracePeriod=30 Nov 26 
11:27:57 crc kubenswrapper[4622]: I1126 11:27:57.058182 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.179385966 podStartE2EDuration="5.058164414s" podCreationTimestamp="2025-11-26 11:27:52 +0000 UTC" firstStartedPulling="2025-11-26 11:27:52.805912519 +0000 UTC m=+1032.397124040" lastFinishedPulling="2025-11-26 11:27:56.684690965 +0000 UTC m=+1036.275902488" observedRunningTime="2025-11-26 11:27:57.052335406 +0000 UTC m=+1036.643546928" watchObservedRunningTime="2025-11-26 11:27:57.058164414 +0000 UTC m=+1036.649375936" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.030484 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-686b8b87c4-gqg7z" podUID="85b663a5-e13e-4653-a4bb-340952a968c9" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.140:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.140:8443: connect: connection refused" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.031251 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.044778 4622 generic.go:334] "Generic (PLEG): container finished" podID="dc828b01-beae-4d81-9210-daa43817167b" containerID="5069597a41d9fd00ea2ba67cd5df67ba74eebab2c8b7cac0621f56e1fb96c01c" exitCode=0 Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.044811 4622 generic.go:334] "Generic (PLEG): container finished" podID="dc828b01-beae-4d81-9210-daa43817167b" containerID="5df42f313bca1c14dc2046eaa1d7a5c3cbe555b710a895baaa6eb93b32b1e0c5" exitCode=2 Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.044819 4622 generic.go:334] "Generic (PLEG): container finished" podID="dc828b01-beae-4d81-9210-daa43817167b" containerID="200ecde126650b05251a4a5a0e12fa031c53946462469848b2eadd26db881484" exitCode=0 Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.044826 4622 generic.go:334] "Generic (PLEG): container finished" podID="dc828b01-beae-4d81-9210-daa43817167b" containerID="9bba6fc744d71100dadfd4373e8d75e55bef4e12477564730eb9dad6ec0c9cea" exitCode=0 Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.044851 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc828b01-beae-4d81-9210-daa43817167b","Type":"ContainerDied","Data":"5069597a41d9fd00ea2ba67cd5df67ba74eebab2c8b7cac0621f56e1fb96c01c"} Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.044887 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc828b01-beae-4d81-9210-daa43817167b","Type":"ContainerDied","Data":"5df42f313bca1c14dc2046eaa1d7a5c3cbe555b710a895baaa6eb93b32b1e0c5"} Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.044897 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc828b01-beae-4d81-9210-daa43817167b","Type":"ContainerDied","Data":"200ecde126650b05251a4a5a0e12fa031c53946462469848b2eadd26db881484"} Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.044907 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc828b01-beae-4d81-9210-daa43817167b","Type":"ContainerDied","Data":"9bba6fc744d71100dadfd4373e8d75e55bef4e12477564730eb9dad6ec0c9cea"} Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.130082 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.181026 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-sg-core-conf-yaml\") pod \"dc828b01-beae-4d81-9210-daa43817167b\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.181130 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7vnz\" (UniqueName: \"kubernetes.io/projected/dc828b01-beae-4d81-9210-daa43817167b-kube-api-access-s7vnz\") pod \"dc828b01-beae-4d81-9210-daa43817167b\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.181181 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc828b01-beae-4d81-9210-daa43817167b-log-httpd\") pod \"dc828b01-beae-4d81-9210-daa43817167b\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.181303 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-config-data\") pod \"dc828b01-beae-4d81-9210-daa43817167b\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.181366 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-scripts\") pod \"dc828b01-beae-4d81-9210-daa43817167b\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.181398 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-combined-ca-bundle\") pod \"dc828b01-beae-4d81-9210-daa43817167b\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.181430 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc828b01-beae-4d81-9210-daa43817167b-run-httpd\") pod \"dc828b01-beae-4d81-9210-daa43817167b\" (UID: \"dc828b01-beae-4d81-9210-daa43817167b\") " Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.182714 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc828b01-beae-4d81-9210-daa43817167b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "dc828b01-beae-4d81-9210-daa43817167b" (UID: "dc828b01-beae-4d81-9210-daa43817167b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.186179 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc828b01-beae-4d81-9210-daa43817167b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "dc828b01-beae-4d81-9210-daa43817167b" (UID: "dc828b01-beae-4d81-9210-daa43817167b"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.192049 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-scripts" (OuterVolumeSpecName: "scripts") pod "dc828b01-beae-4d81-9210-daa43817167b" (UID: "dc828b01-beae-4d81-9210-daa43817167b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.198970 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc828b01-beae-4d81-9210-daa43817167b-kube-api-access-s7vnz" (OuterVolumeSpecName: "kube-api-access-s7vnz") pod "dc828b01-beae-4d81-9210-daa43817167b" (UID: "dc828b01-beae-4d81-9210-daa43817167b"). InnerVolumeSpecName "kube-api-access-s7vnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.206443 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "dc828b01-beae-4d81-9210-daa43817167b" (UID: "dc828b01-beae-4d81-9210-daa43817167b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.238947 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc828b01-beae-4d81-9210-daa43817167b" (UID: "dc828b01-beae-4d81-9210-daa43817167b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.252066 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-config-data" (OuterVolumeSpecName: "config-data") pod "dc828b01-beae-4d81-9210-daa43817167b" (UID: "dc828b01-beae-4d81-9210-daa43817167b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.284799 4622 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.284829 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7vnz\" (UniqueName: \"kubernetes.io/projected/dc828b01-beae-4d81-9210-daa43817167b-kube-api-access-s7vnz\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.284842 4622 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc828b01-beae-4d81-9210-daa43817167b-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.284856 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.284865 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.284874 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc828b01-beae-4d81-9210-daa43817167b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.284882 4622 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc828b01-beae-4d81-9210-daa43817167b-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.782171 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-kk9n7"] Nov 26 11:27:58 crc kubenswrapper[4622]: E1126 11:27:58.782880 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="ceilometer-central-agent" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.782901 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="ceilometer-central-agent" Nov 26 11:27:58 crc kubenswrapper[4622]: E1126 11:27:58.782914 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="456ff799-1844-44ae-ac17-fb35b70ca9a4" containerName="mariadb-database-create" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.782920 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="456ff799-1844-44ae-ac17-fb35b70ca9a4" containerName="mariadb-database-create" Nov 26 11:27:58 crc kubenswrapper[4622]: E1126 11:27:58.782935 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="sg-core" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.782941 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="sg-core" Nov 26 11:27:58 crc kubenswrapper[4622]: E1126 11:27:58.782950 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96413beb-fc9e-42bd-a4b1-0542d3d48944" containerName="mariadb-account-create-update" Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.782955 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="96413beb-fc9e-42bd-a4b1-0542d3d48944" containerName="mariadb-account-create-update"
Nov 26 11:27:58 crc kubenswrapper[4622]: E1126 11:27:58.782965 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d285966c-61fa-4be9-a16d-7aa8d0edcfe5" containerName="mariadb-account-create-update"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.782970 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="d285966c-61fa-4be9-a16d-7aa8d0edcfe5" containerName="mariadb-account-create-update"
Nov 26 11:27:58 crc kubenswrapper[4622]: E1126 11:27:58.782980 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a55622c-83da-4690-a510-1310eb081529" containerName="mariadb-database-create"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.782985 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a55622c-83da-4690-a510-1310eb081529" containerName="mariadb-database-create"
Nov 26 11:27:58 crc kubenswrapper[4622]: E1126 11:27:58.782991 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc21caf-ae0d-4608-9437-f5180cf104c5" containerName="mariadb-account-create-update"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.782997 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc21caf-ae0d-4608-9437-f5180cf104c5" containerName="mariadb-account-create-update"
Nov 26 11:27:58 crc kubenswrapper[4622]: E1126 11:27:58.783013 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="proxy-httpd"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.783018 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="proxy-httpd"
Nov 26 11:27:58 crc kubenswrapper[4622]: E1126 11:27:58.783028 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc5b6f9c-6f22-49fb-8506-3f7b25e7752f" containerName="mariadb-database-create"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.783034 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc5b6f9c-6f22-49fb-8506-3f7b25e7752f" containerName="mariadb-database-create"
Nov 26 11:27:58 crc kubenswrapper[4622]: E1126 11:27:58.783050 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="ceilometer-notification-agent"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.783055 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="ceilometer-notification-agent"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.783219 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc5b6f9c-6f22-49fb-8506-3f7b25e7752f" containerName="mariadb-database-create"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.783239 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="96413beb-fc9e-42bd-a4b1-0542d3d48944" containerName="mariadb-account-create-update"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.783250 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a55622c-83da-4690-a510-1310eb081529" containerName="mariadb-database-create"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.783263 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="proxy-httpd"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.783269 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fc21caf-ae0d-4608-9437-f5180cf104c5" containerName="mariadb-account-create-update"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.783280 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="d285966c-61fa-4be9-a16d-7aa8d0edcfe5" containerName="mariadb-account-create-update"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.783292 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="ceilometer-notification-agent"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.783303 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="ceilometer-central-agent"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.783315 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="456ff799-1844-44ae-ac17-fb35b70ca9a4" containerName="mariadb-database-create"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.783323 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc828b01-beae-4d81-9210-daa43817167b" containerName="sg-core"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.784189 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-kk9n7"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.786049 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-mgwpq"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.786358 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.786631 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.797660 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-kk9n7"]
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.911126 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-scripts\") pod \"nova-cell0-conductor-db-sync-kk9n7\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " pod="openstack/nova-cell0-conductor-db-sync-kk9n7"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.911206 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hqqs\" (UniqueName: \"kubernetes.io/projected/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-kube-api-access-7hqqs\") pod \"nova-cell0-conductor-db-sync-kk9n7\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " pod="openstack/nova-cell0-conductor-db-sync-kk9n7"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.911412 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-config-data\") pod \"nova-cell0-conductor-db-sync-kk9n7\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " pod="openstack/nova-cell0-conductor-db-sync-kk9n7"
Nov 26 11:27:58 crc kubenswrapper[4622]: I1126 11:27:58.911758 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-kk9n7\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " pod="openstack/nova-cell0-conductor-db-sync-kk9n7"
\"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-kk9n7\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " pod="openstack/nova-cell0-conductor-db-sync-kk9n7" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.014733 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-kk9n7\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " pod="openstack/nova-cell0-conductor-db-sync-kk9n7" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.014853 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-scripts\") pod \"nova-cell0-conductor-db-sync-kk9n7\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " pod="openstack/nova-cell0-conductor-db-sync-kk9n7" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.014935 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hqqs\" (UniqueName: \"kubernetes.io/projected/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-kube-api-access-7hqqs\") pod \"nova-cell0-conductor-db-sync-kk9n7\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " pod="openstack/nova-cell0-conductor-db-sync-kk9n7" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.014996 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-config-data\") pod \"nova-cell0-conductor-db-sync-kk9n7\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " pod="openstack/nova-cell0-conductor-db-sync-kk9n7" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.019174 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-scripts\") pod \"nova-cell0-conductor-db-sync-kk9n7\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " pod="openstack/nova-cell0-conductor-db-sync-kk9n7" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.020133 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-config-data\") pod \"nova-cell0-conductor-db-sync-kk9n7\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " pod="openstack/nova-cell0-conductor-db-sync-kk9n7" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.028336 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-kk9n7\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " pod="openstack/nova-cell0-conductor-db-sync-kk9n7" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.031331 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hqqs\" (UniqueName: \"kubernetes.io/projected/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-kube-api-access-7hqqs\") pod \"nova-cell0-conductor-db-sync-kk9n7\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " pod="openstack/nova-cell0-conductor-db-sync-kk9n7" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.053978 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"dc828b01-beae-4d81-9210-daa43817167b","Type":"ContainerDied","Data":"db30bbf005412c12c0bf6a364cb2af7d69cbabc39b43eae25abd228b4c4113f7"} Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.054060 4622 scope.go:117] "RemoveContainer" containerID="5069597a41d9fd00ea2ba67cd5df67ba74eebab2c8b7cac0621f56e1fb96c01c" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.054346 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.098084 4622 scope.go:117] "RemoveContainer" containerID="5df42f313bca1c14dc2046eaa1d7a5c3cbe555b710a895baaa6eb93b32b1e0c5" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.108127 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.112050 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-kk9n7" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.122616 4622 scope.go:117] "RemoveContainer" containerID="200ecde126650b05251a4a5a0e12fa031c53946462469848b2eadd26db881484" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.136565 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.143022 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.145222 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.151884 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.152668 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.161186 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.169651 4622 scope.go:117] "RemoveContainer" containerID="9bba6fc744d71100dadfd4373e8d75e55bef4e12477564730eb9dad6ec0c9cea" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.221449 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5935113-0b69-44a6-a6d3-2bde183f7511-run-httpd\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.221511 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-scripts\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.221589 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-config-data\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.221656 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-q2rf2\" (UniqueName: \"kubernetes.io/projected/a5935113-0b69-44a6-a6d3-2bde183f7511-kube-api-access-q2rf2\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.221679 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.221736 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5935113-0b69-44a6-a6d3-2bde183f7511-log-httpd\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.221783 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.323907 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2rf2\" (UniqueName: \"kubernetes.io/projected/a5935113-0b69-44a6-a6d3-2bde183f7511-kube-api-access-q2rf2\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.324080 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.324155 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5935113-0b69-44a6-a6d3-2bde183f7511-log-httpd\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.324213 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.324307 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5935113-0b69-44a6-a6d3-2bde183f7511-run-httpd\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.324334 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-scripts\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 
11:27:59.324410 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-config-data\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.325965 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5935113-0b69-44a6-a6d3-2bde183f7511-run-httpd\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.326005 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5935113-0b69-44a6-a6d3-2bde183f7511-log-httpd\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.329202 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.329203 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.330702 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-config-data\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.331679 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-scripts\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.340611 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2rf2\" (UniqueName: \"kubernetes.io/projected/a5935113-0b69-44a6-a6d3-2bde183f7511-kube-api-access-q2rf2\") pod \"ceilometer-0\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") " pod="openstack/ceilometer-0" Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.551009 4622 util.go:30] "No sandbox for pod can be found. 
Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.642904 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-kk9n7"]
Nov 26 11:27:59 crc kubenswrapper[4622]: I1126 11:27:59.784646 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 26 11:28:00 crc kubenswrapper[4622]: I1126 11:28:00.063294 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5935113-0b69-44a6-a6d3-2bde183f7511","Type":"ContainerStarted","Data":"83f2eb0fbc6e217459da2fd441c54b68dfa15080d1a3911b778b298bb2fdc927"}
Nov 26 11:28:00 crc kubenswrapper[4622]: I1126 11:28:00.064461 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-kk9n7" event={"ID":"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8","Type":"ContainerStarted","Data":"176ee61c80ddf565f21fe4da223b7a72519b89fdd5d419e9c8bb6b942bba76b8"}
Nov 26 11:28:00 crc kubenswrapper[4622]: I1126 11:28:00.714722 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc828b01-beae-4d81-9210-daa43817167b" path="/var/lib/kubelet/pods/dc828b01-beae-4d81-9210-daa43817167b/volumes"
Nov 26 11:28:01 crc kubenswrapper[4622]: I1126 11:28:01.074556 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5935113-0b69-44a6-a6d3-2bde183f7511","Type":"ContainerStarted","Data":"88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48"}
Nov 26 11:28:02 crc kubenswrapper[4622]: I1126 11:28:02.084262 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5935113-0b69-44a6-a6d3-2bde183f7511","Type":"ContainerStarted","Data":"ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196"}
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599164 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc5b6f9c_6f22_49fb_8506_3f7b25e7752f.slice/crio-c9dbebe77f575c68eacf5f54a77747893e80ea7070e315dc63414e9d4162a300": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc5b6f9c_6f22_49fb_8506_3f7b25e7752f.slice/crio-c9dbebe77f575c68eacf5f54a77747893e80ea7070e315dc63414e9d4162a300: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599452 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd285966c_61fa_4be9_a16d_7aa8d0edcfe5.slice/crio-6c84df849eff252b3c4096e9c9b4f2f6ef688e69a0ad85ac765e334be1ebad71": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd285966c_61fa_4be9_a16d_7aa8d0edcfe5.slice/crio-6c84df849eff252b3c4096e9c9b4f2f6ef688e69a0ad85ac765e334be1ebad71: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599478 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a55622c_83da_4690_a510_1310eb081529.slice/crio-fb9ab4c92258fd0687f2033a20247b59f38b50f6019528a369b7e710375547a3": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a55622c_83da_4690_a510_1310eb081529.slice/crio-fb9ab4c92258fd0687f2033a20247b59f38b50f6019528a369b7e710375547a3: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599492 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc5b6f9c_6f22_49fb_8506_3f7b25e7752f.slice/crio-conmon-5dd883989a13cea719f72bafd4f4b35a117827968850a9bf94190c8b3d23b0b2.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc5b6f9c_6f22_49fb_8506_3f7b25e7752f.slice/crio-conmon-5dd883989a13cea719f72bafd4f4b35a117827968850a9bf94190c8b3d23b0b2.scope: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599571 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd285966c_61fa_4be9_a16d_7aa8d0edcfe5.slice/crio-conmon-19f1d26598fac07827e23ed03ca583478dcb031f0984397d3c905ffdb3ae98b9.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd285966c_61fa_4be9_a16d_7aa8d0edcfe5.slice/crio-conmon-19f1d26598fac07827e23ed03ca583478dcb031f0984397d3c905ffdb3ae98b9.scope: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599648 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a55622c_83da_4690_a510_1310eb081529.slice/crio-conmon-f9abdcd9f52fac81d8fed4870fae13a5b73725c3fb4ff0d07108b2da56827511.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a55622c_83da_4690_a510_1310eb081529.slice/crio-conmon-f9abdcd9f52fac81d8fed4870fae13a5b73725c3fb4ff0d07108b2da56827511.scope: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599665 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc5b6f9c_6f22_49fb_8506_3f7b25e7752f.slice/crio-5dd883989a13cea719f72bafd4f4b35a117827968850a9bf94190c8b3d23b0b2.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc5b6f9c_6f22_49fb_8506_3f7b25e7752f.slice/crio-5dd883989a13cea719f72bafd4f4b35a117827968850a9bf94190c8b3d23b0b2.scope: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599680 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd285966c_61fa_4be9_a16d_7aa8d0edcfe5.slice/crio-19f1d26598fac07827e23ed03ca583478dcb031f0984397d3c905ffdb3ae98b9.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd285966c_61fa_4be9_a16d_7aa8d0edcfe5.slice/crio-19f1d26598fac07827e23ed03ca583478dcb031f0984397d3c905ffdb3ae98b9.scope: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599692 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a55622c_83da_4690_a510_1310eb081529.slice/crio-f9abdcd9f52fac81d8fed4870fae13a5b73725c3fb4ff0d07108b2da56827511.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a55622c_83da_4690_a510_1310eb081529.slice/crio-f9abdcd9f52fac81d8fed4870fae13a5b73725c3fb4ff0d07108b2da56827511.scope: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599728 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fc21caf_ae0d_4608_9437_f5180cf104c5.slice/crio-21711b406ffd2da329a1105e0d2b9700d5e56e0e588822b280391f113c38e24c": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fc21caf_ae0d_4608_9437_f5180cf104c5.slice/crio-21711b406ffd2da329a1105e0d2b9700d5e56e0e588822b280391f113c38e24c: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599747 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fc21caf_ae0d_4608_9437_f5180cf104c5.slice/crio-conmon-bd610e581b6879364acebf98dabadd3a9b2ef269d025c654ba7b2eb9da4b79c7.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fc21caf_ae0d_4608_9437_f5180cf104c5.slice/crio-conmon-bd610e581b6879364acebf98dabadd3a9b2ef269d025c654ba7b2eb9da4b79c7.scope: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599769 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod456ff799_1844_44ae_ac17_fb35b70ca9a4.slice/crio-af00f5a509741ed999b8324b78964234a866da1bae67ce3b8a1d7039bc1cd2d9": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod456ff799_1844_44ae_ac17_fb35b70ca9a4.slice/crio-af00f5a509741ed999b8324b78964234a866da1bae67ce3b8a1d7039bc1cd2d9: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599787 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96413beb_fc9e_42bd_a4b1_0542d3d48944.slice/crio-eea1e0374644d6de4c3a59ccb7d3186d3003d87a105f590c736a3a6567020847": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96413beb_fc9e_42bd_a4b1_0542d3d48944.slice/crio-eea1e0374644d6de4c3a59ccb7d3186d3003d87a105f590c736a3a6567020847: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599807 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fc21caf_ae0d_4608_9437_f5180cf104c5.slice/crio-bd610e581b6879364acebf98dabadd3a9b2ef269d025c654ba7b2eb9da4b79c7.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fc21caf_ae0d_4608_9437_f5180cf104c5.slice/crio-bd610e581b6879364acebf98dabadd3a9b2ef269d025c654ba7b2eb9da4b79c7.scope: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599822 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod456ff799_1844_44ae_ac17_fb35b70ca9a4.slice/crio-conmon-193dd062fac70c2c1ea0d077fdcea4a1c690d39158d575f3f22b0c979dd5a3a4.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod456ff799_1844_44ae_ac17_fb35b70ca9a4.slice/crio-conmon-193dd062fac70c2c1ea0d077fdcea4a1c690d39158d575f3f22b0c979dd5a3a4.scope: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599834 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod456ff799_1844_44ae_ac17_fb35b70ca9a4.slice/crio-193dd062fac70c2c1ea0d077fdcea4a1c690d39158d575f3f22b0c979dd5a3a4.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod456ff799_1844_44ae_ac17_fb35b70ca9a4.slice/crio-193dd062fac70c2c1ea0d077fdcea4a1c690d39158d575f3f22b0c979dd5a3a4.scope: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599847 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96413beb_fc9e_42bd_a4b1_0542d3d48944.slice/crio-conmon-c0548d4e21e88578a05cd7fced3dbaa84506f065eca40e3c12995178172fe0e2.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96413beb_fc9e_42bd_a4b1_0542d3d48944.slice/crio-conmon-c0548d4e21e88578a05cd7fced3dbaa84506f065eca40e3c12995178172fe0e2.scope: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.599869 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96413beb_fc9e_42bd_a4b1_0542d3d48944.slice/crio-c0548d4e21e88578a05cd7fced3dbaa84506f065eca40e3c12995178172fe0e2.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96413beb_fc9e_42bd_a4b1_0542d3d48944.slice/crio-c0548d4e21e88578a05cd7fced3dbaa84506f065eca40e3c12995178172fe0e2.scope: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: W1126 11:28:02.602006 4622 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc828b01_beae_4d81_9210_daa43817167b.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc828b01_beae_4d81_9210_daa43817167b.slice: no such file or directory
Nov 26 11:28:02 crc kubenswrapper[4622]: E1126 11:28:02.799162 4622 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85b663a5_e13e_4653_a4bb_340952a968c9.slice/crio-conmon-ddae576e306c48d7087832ee3088ed9b10e1389b547debbf986408ac880d263a.scope\": RecentStats: unable to find data in memory cache]"
Nov 26 11:28:03 crc kubenswrapper[4622]: I1126 11:28:03.093100 4622 generic.go:334] "Generic (PLEG): container finished" podID="85b663a5-e13e-4653-a4bb-340952a968c9" containerID="ddae576e306c48d7087832ee3088ed9b10e1389b547debbf986408ac880d263a" exitCode=137
Nov 26 11:28:03 crc kubenswrapper[4622]: I1126 11:28:03.093152 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-686b8b87c4-gqg7z" event={"ID":"85b663a5-e13e-4653-a4bb-340952a968c9","Type":"ContainerDied","Data":"ddae576e306c48d7087832ee3088ed9b10e1389b547debbf986408ac880d263a"}
Nov 26 11:28:03 crc kubenswrapper[4622]: I1126 11:28:03.095815 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5935113-0b69-44a6-a6d3-2bde183f7511","Type":"ContainerStarted","Data":"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb"}
event for pod" pod="openstack/ceilometer-0" event={"ID":"a5935113-0b69-44a6-a6d3-2bde183f7511","Type":"ContainerStarted","Data":"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb"} Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.399086 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.477957 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-horizon-tls-certs\") pod \"85b663a5-e13e-4653-a4bb-340952a968c9\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.478196 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85b663a5-e13e-4653-a4bb-340952a968c9-scripts\") pod \"85b663a5-e13e-4653-a4bb-340952a968c9\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.478232 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-horizon-secret-key\") pod \"85b663a5-e13e-4653-a4bb-340952a968c9\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.478280 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85b663a5-e13e-4653-a4bb-340952a968c9-config-data\") pod \"85b663a5-e13e-4653-a4bb-340952a968c9\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.478301 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85b663a5-e13e-4653-a4bb-340952a968c9-logs\") pod \"85b663a5-e13e-4653-a4bb-340952a968c9\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.478363 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pjb2\" (UniqueName: \"kubernetes.io/projected/85b663a5-e13e-4653-a4bb-340952a968c9-kube-api-access-4pjb2\") pod \"85b663a5-e13e-4653-a4bb-340952a968c9\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.478476 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-combined-ca-bundle\") pod \"85b663a5-e13e-4653-a4bb-340952a968c9\" (UID: \"85b663a5-e13e-4653-a4bb-340952a968c9\") " Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.479542 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85b663a5-e13e-4653-a4bb-340952a968c9-logs" (OuterVolumeSpecName: "logs") pod "85b663a5-e13e-4653-a4bb-340952a968c9" (UID: "85b663a5-e13e-4653-a4bb-340952a968c9"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.480980 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "85b663a5-e13e-4653-a4bb-340952a968c9" (UID: "85b663a5-e13e-4653-a4bb-340952a968c9"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.484597 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85b663a5-e13e-4653-a4bb-340952a968c9-kube-api-access-4pjb2" (OuterVolumeSpecName: "kube-api-access-4pjb2") pod "85b663a5-e13e-4653-a4bb-340952a968c9" (UID: "85b663a5-e13e-4653-a4bb-340952a968c9"). InnerVolumeSpecName "kube-api-access-4pjb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.497848 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "85b663a5-e13e-4653-a4bb-340952a968c9" (UID: "85b663a5-e13e-4653-a4bb-340952a968c9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.498944 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85b663a5-e13e-4653-a4bb-340952a968c9-scripts" (OuterVolumeSpecName: "scripts") pod "85b663a5-e13e-4653-a4bb-340952a968c9" (UID: "85b663a5-e13e-4653-a4bb-340952a968c9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.499168 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85b663a5-e13e-4653-a4bb-340952a968c9-config-data" (OuterVolumeSpecName: "config-data") pod "85b663a5-e13e-4653-a4bb-340952a968c9" (UID: "85b663a5-e13e-4653-a4bb-340952a968c9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.520237 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "85b663a5-e13e-4653-a4bb-340952a968c9" (UID: "85b663a5-e13e-4653-a4bb-340952a968c9"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.580762 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.580794 4622 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.580805 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85b663a5-e13e-4653-a4bb-340952a968c9-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.580814 4622 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/85b663a5-e13e-4653-a4bb-340952a968c9-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.580825 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85b663a5-e13e-4653-a4bb-340952a968c9-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.580837 4622 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85b663a5-e13e-4653-a4bb-340952a968c9-logs\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:06 crc kubenswrapper[4622]: I1126 11:28:06.580847 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pjb2\" (UniqueName: \"kubernetes.io/projected/85b663a5-e13e-4653-a4bb-340952a968c9-kube-api-access-4pjb2\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:07 crc kubenswrapper[4622]: I1126 11:28:07.096238 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:07 crc kubenswrapper[4622]: I1126 11:28:07.137842 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-686b8b87c4-gqg7z" event={"ID":"85b663a5-e13e-4653-a4bb-340952a968c9","Type":"ContainerDied","Data":"b2cb0cbcbebd97542e946031d9909db16c75abf625bd93306c5d0c28ad369d4a"} Nov 26 11:28:07 crc kubenswrapper[4622]: I1126 11:28:07.137898 4622 scope.go:117] "RemoveContainer" containerID="db66f6b6e8a9d8a3409bda6d5204063e52dfe897279be8e132b9d8af69263fd6" Nov 26 11:28:07 crc kubenswrapper[4622]: I1126 11:28:07.138015 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-686b8b87c4-gqg7z" Nov 26 11:28:07 crc kubenswrapper[4622]: I1126 11:28:07.141899 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-kk9n7" event={"ID":"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8","Type":"ContainerStarted","Data":"b0d4492693c3367cfc43dd4def5bdffaea248e075c76642e18529a7c01b7d04c"} Nov 26 11:28:07 crc kubenswrapper[4622]: I1126 11:28:07.161007 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-kk9n7" podStartSLOduration=2.644524126 podStartE2EDuration="9.160995474s" podCreationTimestamp="2025-11-26 11:27:58 +0000 UTC" firstStartedPulling="2025-11-26 11:27:59.658590444 +0000 UTC m=+1039.249801965" lastFinishedPulling="2025-11-26 11:28:06.175061791 +0000 UTC m=+1045.766273313" observedRunningTime="2025-11-26 11:28:07.154536237 +0000 UTC m=+1046.745747759" watchObservedRunningTime="2025-11-26 11:28:07.160995474 +0000 UTC m=+1046.752206997" Nov 26 11:28:07 crc kubenswrapper[4622]: I1126 11:28:07.201559 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-686b8b87c4-gqg7z"] Nov 26 11:28:07 crc kubenswrapper[4622]: I1126 11:28:07.208188 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-686b8b87c4-gqg7z"] Nov 26 11:28:07 crc kubenswrapper[4622]: I1126 11:28:07.316060 4622 scope.go:117] "RemoveContainer" containerID="ddae576e306c48d7087832ee3088ed9b10e1389b547debbf986408ac880d263a" Nov 26 11:28:08 crc kubenswrapper[4622]: I1126 11:28:08.155781 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="ceilometer-central-agent" containerID="cri-o://88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48" gracePeriod=30 Nov 26 11:28:08 crc kubenswrapper[4622]: I1126 11:28:08.155890 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5935113-0b69-44a6-a6d3-2bde183f7511","Type":"ContainerStarted","Data":"ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386"} Nov 26 11:28:08 crc kubenswrapper[4622]: I1126 11:28:08.155939 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 26 11:28:08 crc kubenswrapper[4622]: I1126 11:28:08.156298 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="proxy-httpd" containerID="cri-o://ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386" gracePeriod=30 Nov 26 11:28:08 crc kubenswrapper[4622]: I1126 11:28:08.156346 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="sg-core" containerID="cri-o://e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb" gracePeriod=30 Nov 26 11:28:08 crc kubenswrapper[4622]: I1126 11:28:08.156401 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="ceilometer-notification-agent" containerID="cri-o://ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196" gracePeriod=30 Nov 26 11:28:08 crc kubenswrapper[4622]: I1126 11:28:08.189776 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" 
Nov 26 11:28:08 crc kubenswrapper[4622]: I1126 11:28:08.719029 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85b663a5-e13e-4653-a4bb-340952a968c9" path="/var/lib/kubelet/pods/85b663a5-e13e-4653-a4bb-340952a968c9/volumes"
Nov 26 11:28:08 crc kubenswrapper[4622]: I1126 11:28:08.977165 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.026445 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-combined-ca-bundle\") pod \"a5935113-0b69-44a6-a6d3-2bde183f7511\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") "
Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.026524 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5935113-0b69-44a6-a6d3-2bde183f7511-log-httpd\") pod \"a5935113-0b69-44a6-a6d3-2bde183f7511\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") "
Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.026549 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-scripts\") pod \"a5935113-0b69-44a6-a6d3-2bde183f7511\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") "
Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.026663 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5935113-0b69-44a6-a6d3-2bde183f7511-run-httpd\") pod \"a5935113-0b69-44a6-a6d3-2bde183f7511\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") "
Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.026783 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-config-data\") pod \"a5935113-0b69-44a6-a6d3-2bde183f7511\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") "
Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.026821 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2rf2\" (UniqueName: \"kubernetes.io/projected/a5935113-0b69-44a6-a6d3-2bde183f7511-kube-api-access-q2rf2\") pod \"a5935113-0b69-44a6-a6d3-2bde183f7511\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") "
Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.026866 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-sg-core-conf-yaml\") pod \"a5935113-0b69-44a6-a6d3-2bde183f7511\" (UID: \"a5935113-0b69-44a6-a6d3-2bde183f7511\") "
Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.027364 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5935113-0b69-44a6-a6d3-2bde183f7511-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a5935113-0b69-44a6-a6d3-2bde183f7511" (UID: "a5935113-0b69-44a6-a6d3-2bde183f7511"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.027819 4622 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5935113-0b69-44a6-a6d3-2bde183f7511-log-httpd\") on node \"crc\" DevicePath \"\""
Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.030862 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5935113-0b69-44a6-a6d3-2bde183f7511-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a5935113-0b69-44a6-a6d3-2bde183f7511" (UID: "a5935113-0b69-44a6-a6d3-2bde183f7511"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.034292 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5935113-0b69-44a6-a6d3-2bde183f7511-kube-api-access-q2rf2" (OuterVolumeSpecName: "kube-api-access-q2rf2") pod "a5935113-0b69-44a6-a6d3-2bde183f7511" (UID: "a5935113-0b69-44a6-a6d3-2bde183f7511"). InnerVolumeSpecName "kube-api-access-q2rf2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.034482 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-scripts" (OuterVolumeSpecName: "scripts") pod "a5935113-0b69-44a6-a6d3-2bde183f7511" (UID: "a5935113-0b69-44a6-a6d3-2bde183f7511"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.051985 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a5935113-0b69-44a6-a6d3-2bde183f7511" (UID: "a5935113-0b69-44a6-a6d3-2bde183f7511"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.082845 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5935113-0b69-44a6-a6d3-2bde183f7511" (UID: "a5935113-0b69-44a6-a6d3-2bde183f7511"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.100347 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-config-data" (OuterVolumeSpecName: "config-data") pod "a5935113-0b69-44a6-a6d3-2bde183f7511" (UID: "a5935113-0b69-44a6-a6d3-2bde183f7511"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.130411 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.130451 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2rf2\" (UniqueName: \"kubernetes.io/projected/a5935113-0b69-44a6-a6d3-2bde183f7511-kube-api-access-q2rf2\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.130466 4622 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.130477 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.130486 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5935113-0b69-44a6-a6d3-2bde183f7511-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.130496 4622 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5935113-0b69-44a6-a6d3-2bde183f7511-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.167051 4622 generic.go:334] "Generic (PLEG): container finished" podID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerID="ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386" exitCode=0 Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.167102 4622 generic.go:334] "Generic (PLEG): container finished" podID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerID="e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb" exitCode=2 Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.167111 4622 generic.go:334] "Generic (PLEG): container finished" podID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerID="ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196" exitCode=0 Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.167120 4622 generic.go:334] "Generic (PLEG): container finished" podID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerID="88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48" exitCode=0 Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.167121 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5935113-0b69-44a6-a6d3-2bde183f7511","Type":"ContainerDied","Data":"ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386"} Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.167206 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5935113-0b69-44a6-a6d3-2bde183f7511","Type":"ContainerDied","Data":"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb"} Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.167220 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"a5935113-0b69-44a6-a6d3-2bde183f7511","Type":"ContainerDied","Data":"ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196"} Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.167231 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5935113-0b69-44a6-a6d3-2bde183f7511","Type":"ContainerDied","Data":"88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48"} Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.167241 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5935113-0b69-44a6-a6d3-2bde183f7511","Type":"ContainerDied","Data":"83f2eb0fbc6e217459da2fd441c54b68dfa15080d1a3911b778b298bb2fdc927"} Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.167141 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.167274 4622 scope.go:117] "RemoveContainer" containerID="ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.185613 4622 scope.go:117] "RemoveContainer" containerID="e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.214491 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.225113 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.231331 4622 scope.go:117] "RemoveContainer" containerID="ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.237789 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:09 crc kubenswrapper[4622]: E1126 11:28:09.238199 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="ceilometer-notification-agent" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.238217 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="ceilometer-notification-agent" Nov 26 11:28:09 crc kubenswrapper[4622]: E1126 11:28:09.238232 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="proxy-httpd" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.238238 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="proxy-httpd" Nov 26 11:28:09 crc kubenswrapper[4622]: E1126 11:28:09.238254 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85b663a5-e13e-4653-a4bb-340952a968c9" containerName="horizon" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.238261 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="85b663a5-e13e-4653-a4bb-340952a968c9" containerName="horizon" Nov 26 11:28:09 crc kubenswrapper[4622]: E1126 11:28:09.238282 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="ceilometer-central-agent" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.238289 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="ceilometer-central-agent" Nov 26 11:28:09 crc kubenswrapper[4622]: E1126 
11:28:09.238303 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="sg-core" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.238308 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="sg-core" Nov 26 11:28:09 crc kubenswrapper[4622]: E1126 11:28:09.238318 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85b663a5-e13e-4653-a4bb-340952a968c9" containerName="horizon-log" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.238323 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="85b663a5-e13e-4653-a4bb-340952a968c9" containerName="horizon-log" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.238470 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="ceilometer-notification-agent" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.238485 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="proxy-httpd" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.238496 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="85b663a5-e13e-4653-a4bb-340952a968c9" containerName="horizon-log" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.238519 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="85b663a5-e13e-4653-a4bb-340952a968c9" containerName="horizon" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.238528 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="ceilometer-central-agent" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.238544 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" containerName="sg-core" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.239943 4622 util.go:30] "No sandbox for pod can be found. 
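[Editor's note] The entries above show the PLEG reporting each old ceilometer-0 container as finished with its exit code (sg-core exits 2, the rest 0) before the kubelet prunes stale cpu_manager/memory_manager state for the replaced pod UID. A minimal sketch for pulling abnormal exits out of a capture like this; the file name kubelet.log and the regex are assumptions based on the format above, not anything the log itself provides:

    import re
    # Matches the PLEG "container finished" entries seen above and
    # reports any container that did not exit cleanly.
    pat = re.compile(r'"Generic \(PLEG\): container finished".*?containerID="([0-9a-f]+)" exitCode=(\d+)')
    with open("kubelet.log") as f:  # hypothetical file name
        for line in f:
            m = pat.search(line)
            if m and int(m.group(2)) != 0:
                print(f"{m.group(1)[:12]} exited {m.group(2)}")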
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.241688 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.241938 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.247786 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.251185 4622 scope.go:117] "RemoveContainer" containerID="88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.274934 4622 scope.go:117] "RemoveContainer" containerID="ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386" Nov 26 11:28:09 crc kubenswrapper[4622]: E1126 11:28:09.275437 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386\": container with ID starting with ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386 not found: ID does not exist" containerID="ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.275474 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386"} err="failed to get container status \"ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386\": rpc error: code = NotFound desc = could not find container \"ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386\": container with ID starting with ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386 not found: ID does not exist" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.275533 4622 scope.go:117] "RemoveContainer" containerID="e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb" Nov 26 11:28:09 crc kubenswrapper[4622]: E1126 11:28:09.276104 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb\": container with ID starting with e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb not found: ID does not exist" containerID="e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.276144 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb"} err="failed to get container status \"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb\": rpc error: code = NotFound desc = could not find container \"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb\": container with ID starting with e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb not found: ID does not exist" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.276172 4622 scope.go:117] "RemoveContainer" containerID="ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196" Nov 26 11:28:09 crc kubenswrapper[4622]: E1126 11:28:09.276563 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196\": container with ID starting with ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196 not found: ID does not exist" containerID="ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.276622 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196"} err="failed to get container status \"ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196\": rpc error: code = NotFound desc = could not find container \"ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196\": container with ID starting with ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196 not found: ID does not exist" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.276650 4622 scope.go:117] "RemoveContainer" containerID="88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48" Nov 26 11:28:09 crc kubenswrapper[4622]: E1126 11:28:09.277729 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48\": container with ID starting with 88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48 not found: ID does not exist" containerID="88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.277762 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48"} err="failed to get container status \"88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48\": rpc error: code = NotFound desc = could not find container \"88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48\": container with ID starting with 88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48 not found: ID does not exist" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.277783 4622 scope.go:117] "RemoveContainer" containerID="ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.278074 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386"} err="failed to get container status \"ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386\": rpc error: code = NotFound desc = could not find container \"ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386\": container with ID starting with ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386 not found: ID does not exist" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.278121 4622 scope.go:117] "RemoveContainer" containerID="e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.278651 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb"} err="failed to get container status \"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb\": rpc error: code = NotFound desc = could not find container \"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb\": container with ID starting with 
e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb not found: ID does not exist" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.278671 4622 scope.go:117] "RemoveContainer" containerID="ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.279192 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196"} err="failed to get container status \"ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196\": rpc error: code = NotFound desc = could not find container \"ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196\": container with ID starting with ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196 not found: ID does not exist" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.279240 4622 scope.go:117] "RemoveContainer" containerID="88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.279950 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48"} err="failed to get container status \"88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48\": rpc error: code = NotFound desc = could not find container \"88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48\": container with ID starting with 88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48 not found: ID does not exist" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.279978 4622 scope.go:117] "RemoveContainer" containerID="ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.280324 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386"} err="failed to get container status \"ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386\": rpc error: code = NotFound desc = could not find container \"ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386\": container with ID starting with ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386 not found: ID does not exist" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.280346 4622 scope.go:117] "RemoveContainer" containerID="e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.280636 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb"} err="failed to get container status \"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb\": rpc error: code = NotFound desc = could not find container \"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb\": container with ID starting with e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb not found: ID does not exist" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.280659 4622 scope.go:117] "RemoveContainer" containerID="ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.280950 4622 pod_container_deletor.go:53] "DeleteContainer returned error" 
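[Editor's note] The long run of RemoveContainer / "DeleteContainer returned error" pairs here is the kubelet retrying deletion of containers the runtime has already garbage-collected; CRI-O answers NotFound on each retry, so the errors are noisy but benign. A sketch to collapse that noise into a per-container count; the file name and the regex are assumptions matching the format above:

    import re
    from collections import Counter
    # Count NotFound deletion errors per container ID to separate
    # benign retries from genuinely failing deletions.
    pat = re.compile(r'"DeleteContainer returned error".*?"ID":"([0-9a-f]+)"')
    counts = Counter()
    with open("kubelet.log") as f:  # hypothetical file name
        for line in f:
            m = pat.search(line)
            if m:
                counts[m.group(1)[:12]] += 1
    for cid, n in counts.most_common():
        print(f"{cid}: {n} NotFound retries")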
containerID={"Type":"cri-o","ID":"ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196"} err="failed to get container status \"ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196\": rpc error: code = NotFound desc = could not find container \"ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196\": container with ID starting with ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196 not found: ID does not exist" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.280967 4622 scope.go:117] "RemoveContainer" containerID="88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.281359 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48"} err="failed to get container status \"88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48\": rpc error: code = NotFound desc = could not find container \"88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48\": container with ID starting with 88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48 not found: ID does not exist" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.281394 4622 scope.go:117] "RemoveContainer" containerID="ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.281840 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386"} err="failed to get container status \"ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386\": rpc error: code = NotFound desc = could not find container \"ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386\": container with ID starting with ccaf62dbb3995d2068e26231677d563a3da91bbf6869e23c0fe52fe2877d5386 not found: ID does not exist" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.281896 4622 scope.go:117] "RemoveContainer" containerID="e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.282373 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb"} err="failed to get container status \"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb\": rpc error: code = NotFound desc = could not find container \"e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb\": container with ID starting with e0729bd63c08eb2878abda6937f6f0cb49f2f92c0778cd2ef162e508ad752bfb not found: ID does not exist" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.282399 4622 scope.go:117] "RemoveContainer" containerID="ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.282806 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196"} err="failed to get container status \"ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196\": rpc error: code = NotFound desc = could not find container \"ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196\": container with ID starting with ea7e9c5b70d262b06767c61ba66c67a8ce462a6d2de77d2bc335ec9ce0ff6196 not found: ID does not exist" Nov 
26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.282825 4622 scope.go:117] "RemoveContainer" containerID="88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.283243 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48"} err="failed to get container status \"88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48\": rpc error: code = NotFound desc = could not find container \"88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48\": container with ID starting with 88c2c93ee240141e1ec632c2c28629b9733afc4f854d5428a8da56541e188c48 not found: ID does not exist" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.334816 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-scripts\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.335194 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05c52465-cf9e-461e-94f6-18aeb095f130-log-httpd\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.335372 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.335779 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.336028 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-config-data\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.336131 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05c52465-cf9e-461e-94f6-18aeb095f130-run-httpd\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.336366 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5kx9\" (UniqueName: \"kubernetes.io/projected/05c52465-cf9e-461e-94f6-18aeb095f130-kube-api-access-r5kx9\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.439666 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-config-data\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.439726 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05c52465-cf9e-461e-94f6-18aeb095f130-run-httpd\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.439820 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5kx9\" (UniqueName: \"kubernetes.io/projected/05c52465-cf9e-461e-94f6-18aeb095f130-kube-api-access-r5kx9\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.439918 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-scripts\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.439985 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05c52465-cf9e-461e-94f6-18aeb095f130-log-httpd\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.440023 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.440061 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.440349 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05c52465-cf9e-461e-94f6-18aeb095f130-run-httpd\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.440654 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05c52465-cf9e-461e-94f6-18aeb095f130-log-httpd\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.445117 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.445330 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-config-data\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.446244 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.447963 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-scripts\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.455224 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5kx9\" (UniqueName: \"kubernetes.io/projected/05c52465-cf9e-461e-94f6-18aeb095f130-kube-api-access-r5kx9\") pod \"ceilometer-0\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.554653 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:28:09 crc kubenswrapper[4622]: I1126 11:28:09.966356 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:09 crc kubenswrapper[4622]: W1126 11:28:09.967893 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05c52465_cf9e_461e_94f6_18aeb095f130.slice/crio-84e3c2fb118d89a1be82157a5a57776b438d462533c0b18f48a86df2878e8d05 WatchSource:0}: Error finding container 84e3c2fb118d89a1be82157a5a57776b438d462533c0b18f48a86df2878e8d05: Status 404 returned error can't find the container with id 84e3c2fb118d89a1be82157a5a57776b438d462533c0b18f48a86df2878e8d05 Nov 26 11:28:10 crc kubenswrapper[4622]: I1126 11:28:10.177725 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05c52465-cf9e-461e-94f6-18aeb095f130","Type":"ContainerStarted","Data":"84e3c2fb118d89a1be82157a5a57776b438d462533c0b18f48a86df2878e8d05"} Nov 26 11:28:10 crc kubenswrapper[4622]: I1126 11:28:10.718489 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5935113-0b69-44a6-a6d3-2bde183f7511" path="/var/lib/kubelet/pods/a5935113-0b69-44a6-a6d3-2bde183f7511/volumes" Nov 26 11:28:11 crc kubenswrapper[4622]: I1126 11:28:11.030194 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:11 crc kubenswrapper[4622]: I1126 11:28:11.190524 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05c52465-cf9e-461e-94f6-18aeb095f130","Type":"ContainerStarted","Data":"39a74ac6ccc7188fe5dcfed3f273de1559953434abce6cd7651ad1210465700e"} Nov 26 11:28:12 crc kubenswrapper[4622]: I1126 11:28:12.203402 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05c52465-cf9e-461e-94f6-18aeb095f130","Type":"ContainerStarted","Data":"5db8703de4910f028d7a6bb62ea137e9c49e4e73d2eaab7db0b589bb4f72185b"} Nov 26 11:28:12 crc kubenswrapper[4622]: I1126 11:28:12.204181 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"05c52465-cf9e-461e-94f6-18aeb095f130","Type":"ContainerStarted","Data":"7a7df4abf8917f2bb6444fa1018e5f042f408da502850c15b929c77819dfcc59"} Nov 26 11:28:12 crc kubenswrapper[4622]: I1126 11:28:12.207003 4622 generic.go:334] "Generic (PLEG): container finished" podID="e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8" containerID="b0d4492693c3367cfc43dd4def5bdffaea248e075c76642e18529a7c01b7d04c" exitCode=0 Nov 26 11:28:12 crc kubenswrapper[4622]: I1126 11:28:12.207078 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-kk9n7" event={"ID":"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8","Type":"ContainerDied","Data":"b0d4492693c3367cfc43dd4def5bdffaea248e075c76642e18529a7c01b7d04c"} Nov 26 11:28:13 crc kubenswrapper[4622]: I1126 11:28:13.482892 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-kk9n7" Nov 26 11:28:13 crc kubenswrapper[4622]: I1126 11:28:13.526664 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hqqs\" (UniqueName: \"kubernetes.io/projected/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-kube-api-access-7hqqs\") pod \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " Nov 26 11:28:13 crc kubenswrapper[4622]: I1126 11:28:13.527044 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-config-data\") pod \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " Nov 26 11:28:13 crc kubenswrapper[4622]: I1126 11:28:13.527147 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-scripts\") pod \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " Nov 26 11:28:13 crc kubenswrapper[4622]: I1126 11:28:13.527241 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-combined-ca-bundle\") pod \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\" (UID: \"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8\") " Nov 26 11:28:13 crc kubenswrapper[4622]: I1126 11:28:13.539650 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-scripts" (OuterVolumeSpecName: "scripts") pod "e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8" (UID: "e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:13 crc kubenswrapper[4622]: I1126 11:28:13.539750 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-kube-api-access-7hqqs" (OuterVolumeSpecName: "kube-api-access-7hqqs") pod "e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8" (UID: "e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8"). InnerVolumeSpecName "kube-api-access-7hqqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:28:13 crc kubenswrapper[4622]: I1126 11:28:13.552738 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-config-data" (OuterVolumeSpecName: "config-data") pod "e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8" (UID: "e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:13 crc kubenswrapper[4622]: I1126 11:28:13.553210 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8" (UID: "e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:13 crc kubenswrapper[4622]: I1126 11:28:13.631221 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:13 crc kubenswrapper[4622]: I1126 11:28:13.631279 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:13 crc kubenswrapper[4622]: I1126 11:28:13.631293 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:13 crc kubenswrapper[4622]: I1126 11:28:13.631310 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hqqs\" (UniqueName: \"kubernetes.io/projected/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8-kube-api-access-7hqqs\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.229963 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05c52465-cf9e-461e-94f6-18aeb095f130","Type":"ContainerStarted","Data":"c01ae423af508009280aaef60b6ef506e6afc24e8f131750de15253204757c27"} Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.230440 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.230128 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" containerName="ceilometer-central-agent" containerID="cri-o://39a74ac6ccc7188fe5dcfed3f273de1559953434abce6cd7651ad1210465700e" gracePeriod=30 Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.230315 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" containerName="ceilometer-notification-agent" containerID="cri-o://7a7df4abf8917f2bb6444fa1018e5f042f408da502850c15b929c77819dfcc59" gracePeriod=30 Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.230344 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" containerName="sg-core" containerID="cri-o://5db8703de4910f028d7a6bb62ea137e9c49e4e73d2eaab7db0b589bb4f72185b" gracePeriod=30 Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.230187 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" containerName="proxy-httpd" containerID="cri-o://c01ae423af508009280aaef60b6ef506e6afc24e8f131750de15253204757c27" gracePeriod=30 Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.232991 4622 kubelet.go:2453] "SyncLoop 
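[Editor's note] The four "Killing container with a grace period" entries above show the kubelet stopping the just-started ceilometer-0 containers with gracePeriod=30 after another deletion arrives; the ContainerDied events that follow record how long each actually took to exit. A sketch pairing the two (same assumed file name and format as before):

    import re
    from datetime import datetime
    # Pair "Killing container" with the matching ContainerDied event
    # to see how much of the 30s grace period each container used.
    ts = re.compile(r'1126 (\d{2}:\d{2}:\d{2}\.\d+)')
    kill = re.compile(r'"Killing container with a grace period".*?containerID="cri-o://([0-9a-f]+)"')
    died = re.compile(r'"ContainerDied","Data":"([0-9a-f]+)"')
    kills = {}
    with open("kubelet.log") as f:  # hypothetical file name
        for line in f:
            t = ts.search(line)
            if not t:
                continue
            when = datetime.strptime(t.group(1), "%H:%M:%S.%f")
            if (m := kill.search(line)):
                kills[m.group(1)] = when
            elif (m := died.search(line)) and m.group(1) in kills:
                print(m.group(1)[:12], when - kills.pop(m.group(1)))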
(PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-kk9n7" event={"ID":"e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8","Type":"ContainerDied","Data":"176ee61c80ddf565f21fe4da223b7a72519b89fdd5d419e9c8bb6b942bba76b8"} Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.233037 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="176ee61c80ddf565f21fe4da223b7a72519b89fdd5d419e9c8bb6b942bba76b8" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.233059 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-kk9n7" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.263167 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.524853 podStartE2EDuration="5.26314665s" podCreationTimestamp="2025-11-26 11:28:09 +0000 UTC" firstStartedPulling="2025-11-26 11:28:09.970119624 +0000 UTC m=+1049.561331146" lastFinishedPulling="2025-11-26 11:28:13.708413274 +0000 UTC m=+1053.299624796" observedRunningTime="2025-11-26 11:28:14.252106228 +0000 UTC m=+1053.843317751" watchObservedRunningTime="2025-11-26 11:28:14.26314665 +0000 UTC m=+1053.854358173" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.319303 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 26 11:28:14 crc kubenswrapper[4622]: E1126 11:28:14.319676 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8" containerName="nova-cell0-conductor-db-sync" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.319697 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8" containerName="nova-cell0-conductor-db-sync" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.319902 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8" containerName="nova-cell0-conductor-db-sync" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.320473 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.322984 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-mgwpq" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.323420 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.328414 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.447808 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d950a2ce-da5a-486f-9459-0da7366810fe-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"d950a2ce-da5a-486f-9459-0da7366810fe\") " pod="openstack/nova-cell0-conductor-0" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.448057 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcsjw\" (UniqueName: \"kubernetes.io/projected/d950a2ce-da5a-486f-9459-0da7366810fe-kube-api-access-qcsjw\") pod \"nova-cell0-conductor-0\" (UID: \"d950a2ce-da5a-486f-9459-0da7366810fe\") " pod="openstack/nova-cell0-conductor-0" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.448342 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d950a2ce-da5a-486f-9459-0da7366810fe-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"d950a2ce-da5a-486f-9459-0da7366810fe\") " pod="openstack/nova-cell0-conductor-0" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.550181 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d950a2ce-da5a-486f-9459-0da7366810fe-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"d950a2ce-da5a-486f-9459-0da7366810fe\") " pod="openstack/nova-cell0-conductor-0" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.550245 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d950a2ce-da5a-486f-9459-0da7366810fe-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"d950a2ce-da5a-486f-9459-0da7366810fe\") " pod="openstack/nova-cell0-conductor-0" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.550340 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcsjw\" (UniqueName: \"kubernetes.io/projected/d950a2ce-da5a-486f-9459-0da7366810fe-kube-api-access-qcsjw\") pod \"nova-cell0-conductor-0\" (UID: \"d950a2ce-da5a-486f-9459-0da7366810fe\") " pod="openstack/nova-cell0-conductor-0" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.555855 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d950a2ce-da5a-486f-9459-0da7366810fe-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"d950a2ce-da5a-486f-9459-0da7366810fe\") " pod="openstack/nova-cell0-conductor-0" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.555890 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d950a2ce-da5a-486f-9459-0da7366810fe-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"d950a2ce-da5a-486f-9459-0da7366810fe\") " pod="openstack/nova-cell0-conductor-0" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.567675 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcsjw\" (UniqueName: \"kubernetes.io/projected/d950a2ce-da5a-486f-9459-0da7366810fe-kube-api-access-qcsjw\") pod \"nova-cell0-conductor-0\" (UID: \"d950a2ce-da5a-486f-9459-0da7366810fe\") " pod="openstack/nova-cell0-conductor-0" Nov 26 11:28:14 crc kubenswrapper[4622]: I1126 11:28:14.634945 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 26 11:28:15 crc kubenswrapper[4622]: I1126 11:28:15.016810 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 26 11:28:15 crc kubenswrapper[4622]: W1126 11:28:15.016870 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd950a2ce_da5a_486f_9459_0da7366810fe.slice/crio-d7a5958db9184db46a9fe284e12d49eeadfa1f80fc5542199b2956dbf94d4684 WatchSource:0}: Error finding container d7a5958db9184db46a9fe284e12d49eeadfa1f80fc5542199b2956dbf94d4684: Status 404 returned error can't find the container with id d7a5958db9184db46a9fe284e12d49eeadfa1f80fc5542199b2956dbf94d4684 Nov 26 11:28:15 crc kubenswrapper[4622]: I1126 11:28:15.199151 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:28:15 crc kubenswrapper[4622]: I1126 11:28:15.199579 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:28:15 crc kubenswrapper[4622]: I1126 11:28:15.245220 4622 generic.go:334] "Generic (PLEG): container finished" podID="05c52465-cf9e-461e-94f6-18aeb095f130" containerID="c01ae423af508009280aaef60b6ef506e6afc24e8f131750de15253204757c27" exitCode=0 Nov 26 11:28:15 crc kubenswrapper[4622]: I1126 11:28:15.245285 4622 generic.go:334] "Generic (PLEG): container finished" podID="05c52465-cf9e-461e-94f6-18aeb095f130" containerID="5db8703de4910f028d7a6bb62ea137e9c49e4e73d2eaab7db0b589bb4f72185b" exitCode=2 Nov 26 11:28:15 crc kubenswrapper[4622]: I1126 11:28:15.245294 4622 generic.go:334] "Generic (PLEG): container finished" podID="05c52465-cf9e-461e-94f6-18aeb095f130" containerID="7a7df4abf8917f2bb6444fa1018e5f042f408da502850c15b929c77819dfcc59" exitCode=0 Nov 26 11:28:15 crc kubenswrapper[4622]: I1126 11:28:15.245295 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05c52465-cf9e-461e-94f6-18aeb095f130","Type":"ContainerDied","Data":"c01ae423af508009280aaef60b6ef506e6afc24e8f131750de15253204757c27"} Nov 26 11:28:15 crc kubenswrapper[4622]: I1126 11:28:15.245346 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05c52465-cf9e-461e-94f6-18aeb095f130","Type":"ContainerDied","Data":"5db8703de4910f028d7a6bb62ea137e9c49e4e73d2eaab7db0b589bb4f72185b"} Nov 26 11:28:15 crc kubenswrapper[4622]: I1126 11:28:15.245361 4622 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05c52465-cf9e-461e-94f6-18aeb095f130","Type":"ContainerDied","Data":"7a7df4abf8917f2bb6444fa1018e5f042f408da502850c15b929c77819dfcc59"} Nov 26 11:28:15 crc kubenswrapper[4622]: I1126 11:28:15.246819 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"d950a2ce-da5a-486f-9459-0da7366810fe","Type":"ContainerStarted","Data":"85df9f80707d1bae75f37cb156118975a7d006af9fea588c8faa855e68a840c7"} Nov 26 11:28:15 crc kubenswrapper[4622]: I1126 11:28:15.246842 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"d950a2ce-da5a-486f-9459-0da7366810fe","Type":"ContainerStarted","Data":"d7a5958db9184db46a9fe284e12d49eeadfa1f80fc5542199b2956dbf94d4684"} Nov 26 11:28:15 crc kubenswrapper[4622]: I1126 11:28:15.246975 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Nov 26 11:28:15 crc kubenswrapper[4622]: I1126 11:28:15.261715 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.261700767 podStartE2EDuration="1.261700767s" podCreationTimestamp="2025-11-26 11:28:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:28:15.259239172 +0000 UTC m=+1054.850450694" watchObservedRunningTime="2025-11-26 11:28:15.261700767 +0000 UTC m=+1054.852912289" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.140158 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.202466 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-sg-core-conf-yaml\") pod \"05c52465-cf9e-461e-94f6-18aeb095f130\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.202633 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5kx9\" (UniqueName: \"kubernetes.io/projected/05c52465-cf9e-461e-94f6-18aeb095f130-kube-api-access-r5kx9\") pod \"05c52465-cf9e-461e-94f6-18aeb095f130\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.202775 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-config-data\") pod \"05c52465-cf9e-461e-94f6-18aeb095f130\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.202838 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05c52465-cf9e-461e-94f6-18aeb095f130-run-httpd\") pod \"05c52465-cf9e-461e-94f6-18aeb095f130\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.202914 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-combined-ca-bundle\") pod \"05c52465-cf9e-461e-94f6-18aeb095f130\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 
11:28:17.203015 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-scripts\") pod \"05c52465-cf9e-461e-94f6-18aeb095f130\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.203037 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05c52465-cf9e-461e-94f6-18aeb095f130-log-httpd\") pod \"05c52465-cf9e-461e-94f6-18aeb095f130\" (UID: \"05c52465-cf9e-461e-94f6-18aeb095f130\") " Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.203793 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05c52465-cf9e-461e-94f6-18aeb095f130-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "05c52465-cf9e-461e-94f6-18aeb095f130" (UID: "05c52465-cf9e-461e-94f6-18aeb095f130"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.204492 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05c52465-cf9e-461e-94f6-18aeb095f130-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "05c52465-cf9e-461e-94f6-18aeb095f130" (UID: "05c52465-cf9e-461e-94f6-18aeb095f130"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.212488 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-scripts" (OuterVolumeSpecName: "scripts") pod "05c52465-cf9e-461e-94f6-18aeb095f130" (UID: "05c52465-cf9e-461e-94f6-18aeb095f130"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.222674 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05c52465-cf9e-461e-94f6-18aeb095f130-kube-api-access-r5kx9" (OuterVolumeSpecName: "kube-api-access-r5kx9") pod "05c52465-cf9e-461e-94f6-18aeb095f130" (UID: "05c52465-cf9e-461e-94f6-18aeb095f130"). InnerVolumeSpecName "kube-api-access-r5kx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.227833 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "05c52465-cf9e-461e-94f6-18aeb095f130" (UID: "05c52465-cf9e-461e-94f6-18aeb095f130"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.261809 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05c52465-cf9e-461e-94f6-18aeb095f130" (UID: "05c52465-cf9e-461e-94f6-18aeb095f130"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.269010 4622 generic.go:334] "Generic (PLEG): container finished" podID="05c52465-cf9e-461e-94f6-18aeb095f130" containerID="39a74ac6ccc7188fe5dcfed3f273de1559953434abce6cd7651ad1210465700e" exitCode=0 Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.269073 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05c52465-cf9e-461e-94f6-18aeb095f130","Type":"ContainerDied","Data":"39a74ac6ccc7188fe5dcfed3f273de1559953434abce6cd7651ad1210465700e"} Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.269108 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.269131 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05c52465-cf9e-461e-94f6-18aeb095f130","Type":"ContainerDied","Data":"84e3c2fb118d89a1be82157a5a57776b438d462533c0b18f48a86df2878e8d05"} Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.269158 4622 scope.go:117] "RemoveContainer" containerID="c01ae423af508009280aaef60b6ef506e6afc24e8f131750de15253204757c27" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.282752 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-config-data" (OuterVolumeSpecName: "config-data") pod "05c52465-cf9e-461e-94f6-18aeb095f130" (UID: "05c52465-cf9e-461e-94f6-18aeb095f130"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.293720 4622 scope.go:117] "RemoveContainer" containerID="5db8703de4910f028d7a6bb62ea137e9c49e4e73d2eaab7db0b589bb4f72185b" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.305890 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5kx9\" (UniqueName: \"kubernetes.io/projected/05c52465-cf9e-461e-94f6-18aeb095f130-kube-api-access-r5kx9\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.305919 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.305932 4622 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05c52465-cf9e-461e-94f6-18aeb095f130-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.305942 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.305951 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.305960 4622 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05c52465-cf9e-461e-94f6-18aeb095f130-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.305969 4622 reconciler_common.go:293] "Volume detached 
for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05c52465-cf9e-461e-94f6-18aeb095f130-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.314747 4622 scope.go:117] "RemoveContainer" containerID="7a7df4abf8917f2bb6444fa1018e5f042f408da502850c15b929c77819dfcc59" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.332219 4622 scope.go:117] "RemoveContainer" containerID="39a74ac6ccc7188fe5dcfed3f273de1559953434abce6cd7651ad1210465700e" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.347923 4622 scope.go:117] "RemoveContainer" containerID="c01ae423af508009280aaef60b6ef506e6afc24e8f131750de15253204757c27" Nov 26 11:28:17 crc kubenswrapper[4622]: E1126 11:28:17.348297 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c01ae423af508009280aaef60b6ef506e6afc24e8f131750de15253204757c27\": container with ID starting with c01ae423af508009280aaef60b6ef506e6afc24e8f131750de15253204757c27 not found: ID does not exist" containerID="c01ae423af508009280aaef60b6ef506e6afc24e8f131750de15253204757c27" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.348341 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c01ae423af508009280aaef60b6ef506e6afc24e8f131750de15253204757c27"} err="failed to get container status \"c01ae423af508009280aaef60b6ef506e6afc24e8f131750de15253204757c27\": rpc error: code = NotFound desc = could not find container \"c01ae423af508009280aaef60b6ef506e6afc24e8f131750de15253204757c27\": container with ID starting with c01ae423af508009280aaef60b6ef506e6afc24e8f131750de15253204757c27 not found: ID does not exist" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.348366 4622 scope.go:117] "RemoveContainer" containerID="5db8703de4910f028d7a6bb62ea137e9c49e4e73d2eaab7db0b589bb4f72185b" Nov 26 11:28:17 crc kubenswrapper[4622]: E1126 11:28:17.348802 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5db8703de4910f028d7a6bb62ea137e9c49e4e73d2eaab7db0b589bb4f72185b\": container with ID starting with 5db8703de4910f028d7a6bb62ea137e9c49e4e73d2eaab7db0b589bb4f72185b not found: ID does not exist" containerID="5db8703de4910f028d7a6bb62ea137e9c49e4e73d2eaab7db0b589bb4f72185b" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.348860 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5db8703de4910f028d7a6bb62ea137e9c49e4e73d2eaab7db0b589bb4f72185b"} err="failed to get container status \"5db8703de4910f028d7a6bb62ea137e9c49e4e73d2eaab7db0b589bb4f72185b\": rpc error: code = NotFound desc = could not find container \"5db8703de4910f028d7a6bb62ea137e9c49e4e73d2eaab7db0b589bb4f72185b\": container with ID starting with 5db8703de4910f028d7a6bb62ea137e9c49e4e73d2eaab7db0b589bb4f72185b not found: ID does not exist" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.348901 4622 scope.go:117] "RemoveContainer" containerID="7a7df4abf8917f2bb6444fa1018e5f042f408da502850c15b929c77819dfcc59" Nov 26 11:28:17 crc kubenswrapper[4622]: E1126 11:28:17.349233 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a7df4abf8917f2bb6444fa1018e5f042f408da502850c15b929c77819dfcc59\": container with ID starting with 7a7df4abf8917f2bb6444fa1018e5f042f408da502850c15b929c77819dfcc59 not found: ID does not 
exist" containerID="7a7df4abf8917f2bb6444fa1018e5f042f408da502850c15b929c77819dfcc59" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.349293 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a7df4abf8917f2bb6444fa1018e5f042f408da502850c15b929c77819dfcc59"} err="failed to get container status \"7a7df4abf8917f2bb6444fa1018e5f042f408da502850c15b929c77819dfcc59\": rpc error: code = NotFound desc = could not find container \"7a7df4abf8917f2bb6444fa1018e5f042f408da502850c15b929c77819dfcc59\": container with ID starting with 7a7df4abf8917f2bb6444fa1018e5f042f408da502850c15b929c77819dfcc59 not found: ID does not exist" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.349324 4622 scope.go:117] "RemoveContainer" containerID="39a74ac6ccc7188fe5dcfed3f273de1559953434abce6cd7651ad1210465700e" Nov 26 11:28:17 crc kubenswrapper[4622]: E1126 11:28:17.349752 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39a74ac6ccc7188fe5dcfed3f273de1559953434abce6cd7651ad1210465700e\": container with ID starting with 39a74ac6ccc7188fe5dcfed3f273de1559953434abce6cd7651ad1210465700e not found: ID does not exist" containerID="39a74ac6ccc7188fe5dcfed3f273de1559953434abce6cd7651ad1210465700e" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.349785 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39a74ac6ccc7188fe5dcfed3f273de1559953434abce6cd7651ad1210465700e"} err="failed to get container status \"39a74ac6ccc7188fe5dcfed3f273de1559953434abce6cd7651ad1210465700e\": rpc error: code = NotFound desc = could not find container \"39a74ac6ccc7188fe5dcfed3f273de1559953434abce6cd7651ad1210465700e\": container with ID starting with 39a74ac6ccc7188fe5dcfed3f273de1559953434abce6cd7651ad1210465700e not found: ID does not exist" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.604204 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.620953 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.630844 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:17 crc kubenswrapper[4622]: E1126 11:28:17.631216 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" containerName="ceilometer-central-agent" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.631236 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" containerName="ceilometer-central-agent" Nov 26 11:28:17 crc kubenswrapper[4622]: E1126 11:28:17.631248 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" containerName="ceilometer-notification-agent" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.631255 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" containerName="ceilometer-notification-agent" Nov 26 11:28:17 crc kubenswrapper[4622]: E1126 11:28:17.631276 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" containerName="sg-core" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.631282 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" 
containerName="sg-core" Nov 26 11:28:17 crc kubenswrapper[4622]: E1126 11:28:17.631309 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" containerName="proxy-httpd" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.631314 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" containerName="proxy-httpd" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.631490 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" containerName="ceilometer-notification-agent" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.631528 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" containerName="sg-core" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.631538 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" containerName="ceilometer-central-agent" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.631550 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" containerName="proxy-httpd" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.633151 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.635582 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.636111 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.640667 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.715245 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08486003-810e-4058-a0c9-20d3d9410cc1-run-httpd\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.715318 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9b7n\" (UniqueName: \"kubernetes.io/projected/08486003-810e-4058-a0c9-20d3d9410cc1-kube-api-access-t9b7n\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.715379 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-config-data\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.715398 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.715413 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-scripts\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.715435 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08486003-810e-4058-a0c9-20d3d9410cc1-log-httpd\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.715485 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.817062 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08486003-810e-4058-a0c9-20d3d9410cc1-run-httpd\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.817199 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9b7n\" (UniqueName: \"kubernetes.io/projected/08486003-810e-4058-a0c9-20d3d9410cc1-kube-api-access-t9b7n\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.817243 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-config-data\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.817281 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.817306 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-scripts\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.817335 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08486003-810e-4058-a0c9-20d3d9410cc1-log-httpd\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.817509 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.817686 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08486003-810e-4058-a0c9-20d3d9410cc1-run-httpd\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.817966 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08486003-810e-4058-a0c9-20d3d9410cc1-log-httpd\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.825454 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-scripts\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.825774 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-config-data\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.826047 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.826203 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.851755 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9b7n\" (UniqueName: \"kubernetes.io/projected/08486003-810e-4058-a0c9-20d3d9410cc1-kube-api-access-t9b7n\") pod \"ceilometer-0\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " pod="openstack/ceilometer-0" Nov 26 11:28:17 crc kubenswrapper[4622]: I1126 11:28:17.951029 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:28:18 crc kubenswrapper[4622]: I1126 11:28:18.369863 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:18 crc kubenswrapper[4622]: W1126 11:28:18.372679 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08486003_810e_4058_a0c9_20d3d9410cc1.slice/crio-19f0a2cf5645cec2a5568484746123c8e75815e7ac3bb425441719d2e83e30dc WatchSource:0}: Error finding container 19f0a2cf5645cec2a5568484746123c8e75815e7ac3bb425441719d2e83e30dc: Status 404 returned error can't find the container with id 19f0a2cf5645cec2a5568484746123c8e75815e7ac3bb425441719d2e83e30dc Nov 26 11:28:18 crc kubenswrapper[4622]: I1126 11:28:18.717985 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05c52465-cf9e-461e-94f6-18aeb095f130" path="/var/lib/kubelet/pods/05c52465-cf9e-461e-94f6-18aeb095f130/volumes" Nov 26 11:28:19 crc kubenswrapper[4622]: I1126 11:28:19.308052 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08486003-810e-4058-a0c9-20d3d9410cc1","Type":"ContainerStarted","Data":"19f0a2cf5645cec2a5568484746123c8e75815e7ac3bb425441719d2e83e30dc"} Nov 26 11:28:20 crc kubenswrapper[4622]: I1126 11:28:20.322006 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08486003-810e-4058-a0c9-20d3d9410cc1","Type":"ContainerStarted","Data":"82b288fc6f61ecc61a1a7769e2273a4d66ad0287ff91f80e44d3b0209078530e"} Nov 26 11:28:20 crc kubenswrapper[4622]: I1126 11:28:20.322553 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08486003-810e-4058-a0c9-20d3d9410cc1","Type":"ContainerStarted","Data":"38084117f861cdc998c2f931dc631e7d8ab639eb3886f88cf2cfc3a90817cfcd"} Nov 26 11:28:21 crc kubenswrapper[4622]: I1126 11:28:21.333902 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08486003-810e-4058-a0c9-20d3d9410cc1","Type":"ContainerStarted","Data":"9138af20fd9ebfc66484cc8cb7577fa824beb4dc2f7342db0f10c0ccd16a1478"} Nov 26 11:28:23 crc kubenswrapper[4622]: I1126 11:28:23.352354 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08486003-810e-4058-a0c9-20d3d9410cc1","Type":"ContainerStarted","Data":"b492990671b55c8ce867a6b3f691143904dc92555ab4d96adfd9a539f0fbe557"} Nov 26 11:28:23 crc kubenswrapper[4622]: I1126 11:28:23.352760 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 26 11:28:24 crc kubenswrapper[4622]: I1126 11:28:24.661334 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Nov 26 11:28:24 crc kubenswrapper[4622]: I1126 11:28:24.682861 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.6295073650000003 podStartE2EDuration="7.682842959s" podCreationTimestamp="2025-11-26 11:28:17 +0000 UTC" firstStartedPulling="2025-11-26 11:28:18.375869399 +0000 UTC m=+1057.967080922" lastFinishedPulling="2025-11-26 11:28:22.429204994 +0000 UTC m=+1062.020416516" observedRunningTime="2025-11-26 11:28:23.375913526 +0000 UTC m=+1062.967125048" watchObservedRunningTime="2025-11-26 11:28:24.682842959 +0000 UTC m=+1064.274054481" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.079620 4622 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-cell0-cell-mapping-w8f5q"] Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.080702 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.082996 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.084611 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.091323 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-w8f5q"] Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.180327 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-config-data\") pod \"nova-cell0-cell-mapping-w8f5q\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.180776 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-scripts\") pod \"nova-cell0-cell-mapping-w8f5q\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.180898 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmnf9\" (UniqueName: \"kubernetes.io/projected/c98829f1-bc94-4867-9c57-2caadb2ae3ae-kube-api-access-hmnf9\") pod \"nova-cell0-cell-mapping-w8f5q\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.180971 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-w8f5q\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.282785 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-scripts\") pod \"nova-cell0-cell-mapping-w8f5q\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.282850 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmnf9\" (UniqueName: \"kubernetes.io/projected/c98829f1-bc94-4867-9c57-2caadb2ae3ae-kube-api-access-hmnf9\") pod \"nova-cell0-cell-mapping-w8f5q\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.282908 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-w8f5q\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 
26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.283007 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-config-data\") pod \"nova-cell0-cell-mapping-w8f5q\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.289226 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-scripts\") pod \"nova-cell0-cell-mapping-w8f5q\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.293001 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-config-data\") pod \"nova-cell0-cell-mapping-w8f5q\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.302253 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.303605 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.307925 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.311959 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-w8f5q\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.326060 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmnf9\" (UniqueName: \"kubernetes.io/projected/c98829f1-bc94-4867-9c57-2caadb2ae3ae-kube-api-access-hmnf9\") pod \"nova-cell0-cell-mapping-w8f5q\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.348240 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.388884 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxdmg\" (UniqueName: \"kubernetes.io/projected/13e08c44-f453-4fc7-945f-583a722ab377-kube-api-access-dxdmg\") pod \"nova-api-0\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " pod="openstack/nova-api-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.389171 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13e08c44-f453-4fc7-945f-583a722ab377-config-data\") pod \"nova-api-0\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " pod="openstack/nova-api-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.389385 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13e08c44-f453-4fc7-945f-583a722ab377-combined-ca-bundle\") 
pod \"nova-api-0\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " pod="openstack/nova-api-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.389468 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13e08c44-f453-4fc7-945f-583a722ab377-logs\") pod \"nova-api-0\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " pod="openstack/nova-api-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.404270 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.429413 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.430617 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.473713 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.503858 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxdmg\" (UniqueName: \"kubernetes.io/projected/13e08c44-f453-4fc7-945f-583a722ab377-kube-api-access-dxdmg\") pod \"nova-api-0\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " pod="openstack/nova-api-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.503950 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13e08c44-f453-4fc7-945f-583a722ab377-config-data\") pod \"nova-api-0\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " pod="openstack/nova-api-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.504056 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13e08c44-f453-4fc7-945f-583a722ab377-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " pod="openstack/nova-api-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.504075 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13e08c44-f453-4fc7-945f-583a722ab377-logs\") pod \"nova-api-0\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " pod="openstack/nova-api-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.504187 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb4e34f-697b-4447-984a-78933d18f3c5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"bbb4e34f-697b-4447-984a-78933d18f3c5\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.504219 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb4e34f-697b-4447-984a-78933d18f3c5-config-data\") pod \"nova-scheduler-0\" (UID: \"bbb4e34f-697b-4447-984a-78933d18f3c5\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.504295 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mh7hm\" (UniqueName: 
\"kubernetes.io/projected/bbb4e34f-697b-4447-984a-78933d18f3c5-kube-api-access-mh7hm\") pod \"nova-scheduler-0\" (UID: \"bbb4e34f-697b-4447-984a-78933d18f3c5\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.522929 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13e08c44-f453-4fc7-945f-583a722ab377-logs\") pod \"nova-api-0\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " pod="openstack/nova-api-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.547560 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13e08c44-f453-4fc7-945f-583a722ab377-config-data\") pod \"nova-api-0\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " pod="openstack/nova-api-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.552781 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13e08c44-f453-4fc7-945f-583a722ab377-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " pod="openstack/nova-api-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.554987 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxdmg\" (UniqueName: \"kubernetes.io/projected/13e08c44-f453-4fc7-945f-583a722ab377-kube-api-access-dxdmg\") pod \"nova-api-0\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " pod="openstack/nova-api-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.555128 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.605015 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.606602 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.610565 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mh7hm\" (UniqueName: \"kubernetes.io/projected/bbb4e34f-697b-4447-984a-78933d18f3c5-kube-api-access-mh7hm\") pod \"nova-scheduler-0\" (UID: \"bbb4e34f-697b-4447-984a-78933d18f3c5\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.610874 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.610883 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb4e34f-697b-4447-984a-78933d18f3c5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"bbb4e34f-697b-4447-984a-78933d18f3c5\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.611021 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb4e34f-697b-4447-984a-78933d18f3c5-config-data\") pod \"nova-scheduler-0\" (UID: \"bbb4e34f-697b-4447-984a-78933d18f3c5\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.624992 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb4e34f-697b-4447-984a-78933d18f3c5-config-data\") pod \"nova-scheduler-0\" (UID: \"bbb4e34f-697b-4447-984a-78933d18f3c5\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.625150 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb4e34f-697b-4447-984a-78933d18f3c5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"bbb4e34f-697b-4447-984a-78933d18f3c5\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.625670 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.659926 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mh7hm\" (UniqueName: \"kubernetes.io/projected/bbb4e34f-697b-4447-984a-78933d18f3c5-kube-api-access-mh7hm\") pod \"nova-scheduler-0\" (UID: \"bbb4e34f-697b-4447-984a-78933d18f3c5\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.684751 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.686165 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.689236 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.704176 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.715113 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzl2d\" (UniqueName: \"kubernetes.io/projected/f818b7cc-2b84-4bff-8c7e-e51a240179df-kube-api-access-lzl2d\") pod \"nova-metadata-0\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " pod="openstack/nova-metadata-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.715161 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f818b7cc-2b84-4bff-8c7e-e51a240179df-config-data\") pod \"nova-metadata-0\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " pod="openstack/nova-metadata-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.715302 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f818b7cc-2b84-4bff-8c7e-e51a240179df-logs\") pod \"nova-metadata-0\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " pod="openstack/nova-metadata-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.715428 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f818b7cc-2b84-4bff-8c7e-e51a240179df-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " pod="openstack/nova-metadata-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.725092 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.746987 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-69494d9f89-vqqzj"] Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.748646 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.765533 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69494d9f89-vqqzj"] Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.817643 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e920ae2c-a5a6-46e8-aca7-086abee6cf08-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e920ae2c-a5a6-46e8-aca7-086abee6cf08\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.817723 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p76nj\" (UniqueName: \"kubernetes.io/projected/926ada02-c646-4251-b21a-3e341402d619-kube-api-access-p76nj\") pod \"dnsmasq-dns-69494d9f89-vqqzj\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") " pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.817779 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f818b7cc-2b84-4bff-8c7e-e51a240179df-logs\") pod \"nova-metadata-0\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " pod="openstack/nova-metadata-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.817834 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-dns-svc\") pod \"dnsmasq-dns-69494d9f89-vqqzj\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") " pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.817892 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-ovsdbserver-nb\") pod \"dnsmasq-dns-69494d9f89-vqqzj\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") " pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.817921 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-ovsdbserver-sb\") pod \"dnsmasq-dns-69494d9f89-vqqzj\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") " pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.818000 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f818b7cc-2b84-4bff-8c7e-e51a240179df-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " pod="openstack/nova-metadata-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.818044 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-config\") pod \"dnsmasq-dns-69494d9f89-vqqzj\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") " pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.818072 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-9kq8h\" (UniqueName: \"kubernetes.io/projected/e920ae2c-a5a6-46e8-aca7-086abee6cf08-kube-api-access-9kq8h\") pod \"nova-cell1-novncproxy-0\" (UID: \"e920ae2c-a5a6-46e8-aca7-086abee6cf08\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.818125 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e920ae2c-a5a6-46e8-aca7-086abee6cf08-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e920ae2c-a5a6-46e8-aca7-086abee6cf08\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.818221 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzl2d\" (UniqueName: \"kubernetes.io/projected/f818b7cc-2b84-4bff-8c7e-e51a240179df-kube-api-access-lzl2d\") pod \"nova-metadata-0\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " pod="openstack/nova-metadata-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.818253 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f818b7cc-2b84-4bff-8c7e-e51a240179df-config-data\") pod \"nova-metadata-0\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " pod="openstack/nova-metadata-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.820886 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f818b7cc-2b84-4bff-8c7e-e51a240179df-logs\") pod \"nova-metadata-0\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " pod="openstack/nova-metadata-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.825256 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f818b7cc-2b84-4bff-8c7e-e51a240179df-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " pod="openstack/nova-metadata-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.828443 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f818b7cc-2b84-4bff-8c7e-e51a240179df-config-data\") pod \"nova-metadata-0\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " pod="openstack/nova-metadata-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.840941 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzl2d\" (UniqueName: \"kubernetes.io/projected/f818b7cc-2b84-4bff-8c7e-e51a240179df-kube-api-access-lzl2d\") pod \"nova-metadata-0\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " pod="openstack/nova-metadata-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.892080 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.920343 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-ovsdbserver-nb\") pod \"dnsmasq-dns-69494d9f89-vqqzj\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") " pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.920403 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-ovsdbserver-sb\") pod \"dnsmasq-dns-69494d9f89-vqqzj\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") " pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.920490 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-config\") pod \"dnsmasq-dns-69494d9f89-vqqzj\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") " pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.920537 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kq8h\" (UniqueName: \"kubernetes.io/projected/e920ae2c-a5a6-46e8-aca7-086abee6cf08-kube-api-access-9kq8h\") pod \"nova-cell1-novncproxy-0\" (UID: \"e920ae2c-a5a6-46e8-aca7-086abee6cf08\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.920584 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e920ae2c-a5a6-46e8-aca7-086abee6cf08-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e920ae2c-a5a6-46e8-aca7-086abee6cf08\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.920710 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e920ae2c-a5a6-46e8-aca7-086abee6cf08-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e920ae2c-a5a6-46e8-aca7-086abee6cf08\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.920742 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p76nj\" (UniqueName: \"kubernetes.io/projected/926ada02-c646-4251-b21a-3e341402d619-kube-api-access-p76nj\") pod \"dnsmasq-dns-69494d9f89-vqqzj\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") " pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.920797 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-dns-svc\") pod \"dnsmasq-dns-69494d9f89-vqqzj\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") " pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.921651 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-dns-svc\") pod \"dnsmasq-dns-69494d9f89-vqqzj\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") " pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 
11:28:25.922301 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-ovsdbserver-sb\") pod \"dnsmasq-dns-69494d9f89-vqqzj\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") " pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.922476 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-config\") pod \"dnsmasq-dns-69494d9f89-vqqzj\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") " pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.924879 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-ovsdbserver-nb\") pod \"dnsmasq-dns-69494d9f89-vqqzj\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") " pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.926193 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e920ae2c-a5a6-46e8-aca7-086abee6cf08-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e920ae2c-a5a6-46e8-aca7-086abee6cf08\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.927001 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e920ae2c-a5a6-46e8-aca7-086abee6cf08-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e920ae2c-a5a6-46e8-aca7-086abee6cf08\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.931119 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.940064 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p76nj\" (UniqueName: \"kubernetes.io/projected/926ada02-c646-4251-b21a-3e341402d619-kube-api-access-p76nj\") pod \"dnsmasq-dns-69494d9f89-vqqzj\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") " pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:25 crc kubenswrapper[4622]: I1126 11:28:25.940560 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kq8h\" (UniqueName: \"kubernetes.io/projected/e920ae2c-a5a6-46e8-aca7-086abee6cf08-kube-api-access-9kq8h\") pod \"nova-cell1-novncproxy-0\" (UID: \"e920ae2c-a5a6-46e8-aca7-086abee6cf08\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.034296 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.074106 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.110866 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-w8f5q"] Nov 26 11:28:26 crc kubenswrapper[4622]: W1126 11:28:26.158131 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc98829f1_bc94_4867_9c57_2caadb2ae3ae.slice/crio-85574ab8f3e8954e3bd2a93b5b5986f1b79f43e9c264dfdf0f141adc78467648 WatchSource:0}: Error finding container 85574ab8f3e8954e3bd2a93b5b5986f1b79f43e9c264dfdf0f141adc78467648: Status 404 returned error can't find the container with id 85574ab8f3e8954e3bd2a93b5b5986f1b79f43e9c264dfdf0f141adc78467648 Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.207346 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.319397 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nnbm5"] Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.320941 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.325964 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.326131 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.346955 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nnbm5"] Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.396856 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w8f5q" event={"ID":"c98829f1-bc94-4867-9c57-2caadb2ae3ae","Type":"ContainerStarted","Data":"85574ab8f3e8954e3bd2a93b5b5986f1b79f43e9c264dfdf0f141adc78467648"} Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.398198 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.398289 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"13e08c44-f453-4fc7-945f-583a722ab377","Type":"ContainerStarted","Data":"f93da95d530c36b4321332d9d52d3799844d72054d3634b3d0fc34e1df14c39e"} Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.432230 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjd8j\" (UniqueName: \"kubernetes.io/projected/497319a3-f556-4fbc-8c50-66ddff723bda-kube-api-access-tjd8j\") pod \"nova-cell1-conductor-db-sync-nnbm5\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.432388 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-config-data\") pod \"nova-cell1-conductor-db-sync-nnbm5\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.432439 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-nnbm5\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.432583 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-scripts\") pod \"nova-cell1-conductor-db-sync-nnbm5\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:26 crc kubenswrapper[4622]: W1126 11:28:26.512695 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode920ae2c_a5a6_46e8_aca7_086abee6cf08.slice/crio-0a2ff897083af7f8b699cb8dd81f66902a0094d572dbcccbf5fbe97c0d746f49 WatchSource:0}: Error finding container 0a2ff897083af7f8b699cb8dd81f66902a0094d572dbcccbf5fbe97c0d746f49: Status 404 returned error can't find the container with id 0a2ff897083af7f8b699cb8dd81f66902a0094d572dbcccbf5fbe97c0d746f49 Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.526615 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.534639 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjd8j\" (UniqueName: \"kubernetes.io/projected/497319a3-f556-4fbc-8c50-66ddff723bda-kube-api-access-tjd8j\") pod \"nova-cell1-conductor-db-sync-nnbm5\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.534838 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-config-data\") pod \"nova-cell1-conductor-db-sync-nnbm5\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.534878 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-nnbm5\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.535122 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-scripts\") pod \"nova-cell1-conductor-db-sync-nnbm5\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.535998 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.541876 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-config-data\") pod \"nova-cell1-conductor-db-sync-nnbm5\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:26 crc 
kubenswrapper[4622]: I1126 11:28:26.543057 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-nnbm5\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.552993 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-scripts\") pod \"nova-cell1-conductor-db-sync-nnbm5\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.558415 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjd8j\" (UniqueName: \"kubernetes.io/projected/497319a3-f556-4fbc-8c50-66ddff723bda-kube-api-access-tjd8j\") pod \"nova-cell1-conductor-db-sync-nnbm5\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.679395 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69494d9f89-vqqzj"] Nov 26 11:28:26 crc kubenswrapper[4622]: I1126 11:28:26.680669 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:26 crc kubenswrapper[4622]: W1126 11:28:26.685058 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod926ada02_c646_4251_b21a_3e341402d619.slice/crio-2af32bd3ab40d12f5374c13e8562efe834de8d3e3b948f85d71952fb6ca42f1e WatchSource:0}: Error finding container 2af32bd3ab40d12f5374c13e8562efe834de8d3e3b948f85d71952fb6ca42f1e: Status 404 returned error can't find the container with id 2af32bd3ab40d12f5374c13e8562efe834de8d3e3b948f85d71952fb6ca42f1e Nov 26 11:28:27 crc kubenswrapper[4622]: I1126 11:28:27.120325 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nnbm5"] Nov 26 11:28:27 crc kubenswrapper[4622]: W1126 11:28:27.124297 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod497319a3_f556_4fbc_8c50_66ddff723bda.slice/crio-4551f9e798e904db23c1ed401f4a4d691b52ca14e84b734d24aaea14efeee966 WatchSource:0}: Error finding container 4551f9e798e904db23c1ed401f4a4d691b52ca14e84b734d24aaea14efeee966: Status 404 returned error can't find the container with id 4551f9e798e904db23c1ed401f4a4d691b52ca14e84b734d24aaea14efeee966 Nov 26 11:28:27 crc kubenswrapper[4622]: I1126 11:28:27.409900 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w8f5q" event={"ID":"c98829f1-bc94-4867-9c57-2caadb2ae3ae","Type":"ContainerStarted","Data":"6fba37a5e02e65b15833f2f028dd820de9a60d7acd88c747abf6a665b4164c02"} Nov 26 11:28:27 crc kubenswrapper[4622]: I1126 11:28:27.412392 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e920ae2c-a5a6-46e8-aca7-086abee6cf08","Type":"ContainerStarted","Data":"0a2ff897083af7f8b699cb8dd81f66902a0094d572dbcccbf5fbe97c0d746f49"} Nov 26 11:28:27 crc kubenswrapper[4622]: I1126 11:28:27.414473 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-conductor-db-sync-nnbm5" event={"ID":"497319a3-f556-4fbc-8c50-66ddff723bda","Type":"ContainerStarted","Data":"625a3515b6940cdf5cb533148aa8bc8f493bf72916d2d1077d8560ad3e012c31"} Nov 26 11:28:27 crc kubenswrapper[4622]: I1126 11:28:27.414530 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-nnbm5" event={"ID":"497319a3-f556-4fbc-8c50-66ddff723bda","Type":"ContainerStarted","Data":"4551f9e798e904db23c1ed401f4a4d691b52ca14e84b734d24aaea14efeee966"} Nov 26 11:28:27 crc kubenswrapper[4622]: I1126 11:28:27.417360 4622 generic.go:334] "Generic (PLEG): container finished" podID="926ada02-c646-4251-b21a-3e341402d619" containerID="eed67efc1cf030497301bc60cf350e8a1d9b2c92c0e482b0626b03751e747ba9" exitCode=0 Nov 26 11:28:27 crc kubenswrapper[4622]: I1126 11:28:27.417479 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" event={"ID":"926ada02-c646-4251-b21a-3e341402d619","Type":"ContainerDied","Data":"eed67efc1cf030497301bc60cf350e8a1d9b2c92c0e482b0626b03751e747ba9"} Nov 26 11:28:27 crc kubenswrapper[4622]: I1126 11:28:27.417537 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" event={"ID":"926ada02-c646-4251-b21a-3e341402d619","Type":"ContainerStarted","Data":"2af32bd3ab40d12f5374c13e8562efe834de8d3e3b948f85d71952fb6ca42f1e"} Nov 26 11:28:27 crc kubenswrapper[4622]: I1126 11:28:27.420746 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bbb4e34f-697b-4447-984a-78933d18f3c5","Type":"ContainerStarted","Data":"627ca6be8eddd8938d0521554b45ea4f6a0cf85ae7c94071b05459e168c10203"} Nov 26 11:28:27 crc kubenswrapper[4622]: I1126 11:28:27.421788 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f818b7cc-2b84-4bff-8c7e-e51a240179df","Type":"ContainerStarted","Data":"3cb316aed68d7d8f69bbda2a15004f136e174f884f28aa3ad88782d47a94b0aa"} Nov 26 11:28:27 crc kubenswrapper[4622]: I1126 11:28:27.431944 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-w8f5q" podStartSLOduration=2.431926392 podStartE2EDuration="2.431926392s" podCreationTimestamp="2025-11-26 11:28:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:28:27.423796062 +0000 UTC m=+1067.015007584" watchObservedRunningTime="2025-11-26 11:28:27.431926392 +0000 UTC m=+1067.023137914" Nov 26 11:28:27 crc kubenswrapper[4622]: I1126 11:28:27.479122 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-nnbm5" podStartSLOduration=1.4790775489999999 podStartE2EDuration="1.479077549s" podCreationTimestamp="2025-11-26 11:28:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:28:27.436453214 +0000 UTC m=+1067.027664736" watchObservedRunningTime="2025-11-26 11:28:27.479077549 +0000 UTC m=+1067.070289091" Nov 26 11:28:28 crc kubenswrapper[4622]: I1126 11:28:28.434730 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" event={"ID":"926ada02-c646-4251-b21a-3e341402d619","Type":"ContainerStarted","Data":"f26ccc070a5e12b7545f55582a479edf470f0145d190be453748689006dd0ad4"} Nov 26 11:28:28 crc kubenswrapper[4622]: I1126 11:28:28.435422 4622 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:28 crc kubenswrapper[4622]: I1126 11:28:28.449936 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" podStartSLOduration=3.449914748 podStartE2EDuration="3.449914748s" podCreationTimestamp="2025-11-26 11:28:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:28:28.448885536 +0000 UTC m=+1068.040097059" watchObservedRunningTime="2025-11-26 11:28:28.449914748 +0000 UTC m=+1068.041126271" Nov 26 11:28:28 crc kubenswrapper[4622]: I1126 11:28:28.805440 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 26 11:28:28 crc kubenswrapper[4622]: I1126 11:28:28.853628 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 26 11:28:29 crc kubenswrapper[4622]: I1126 11:28:29.443744 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"13e08c44-f453-4fc7-945f-583a722ab377","Type":"ContainerStarted","Data":"3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a"} Nov 26 11:28:29 crc kubenswrapper[4622]: I1126 11:28:29.444036 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"13e08c44-f453-4fc7-945f-583a722ab377","Type":"ContainerStarted","Data":"2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29"} Nov 26 11:28:29 crc kubenswrapper[4622]: I1126 11:28:29.447339 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bbb4e34f-697b-4447-984a-78933d18f3c5","Type":"ContainerStarted","Data":"481ad87f697ee9d001eb3ff5a399ff0657c47fd9bc9897fbf3ef2586f2066da6"} Nov 26 11:28:29 crc kubenswrapper[4622]: I1126 11:28:29.449907 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f818b7cc-2b84-4bff-8c7e-e51a240179df","Type":"ContainerStarted","Data":"cc1b0d51a08e90923d14e8fd416207a1bd85e357b11d03489b66d7f6b9da6ac8"} Nov 26 11:28:29 crc kubenswrapper[4622]: I1126 11:28:29.450121 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f818b7cc-2b84-4bff-8c7e-e51a240179df" containerName="nova-metadata-log" containerID="cri-o://cc1b0d51a08e90923d14e8fd416207a1bd85e357b11d03489b66d7f6b9da6ac8" gracePeriod=30 Nov 26 11:28:29 crc kubenswrapper[4622]: I1126 11:28:29.450177 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f818b7cc-2b84-4bff-8c7e-e51a240179df" containerName="nova-metadata-metadata" containerID="cri-o://35e7aa4109463f397bed20869781e2958b160f7927640379ce4a2e49129deca7" gracePeriod=30 Nov 26 11:28:29 crc kubenswrapper[4622]: I1126 11:28:29.452247 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="e920ae2c-a5a6-46e8-aca7-086abee6cf08" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://9a82fefb9ed634671cd159e591bbd723e72676ff703ab6f5d0d6c8cafbe09b2e" gracePeriod=30 Nov 26 11:28:29 crc kubenswrapper[4622]: I1126 11:28:29.452430 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"e920ae2c-a5a6-46e8-aca7-086abee6cf08","Type":"ContainerStarted","Data":"9a82fefb9ed634671cd159e591bbd723e72676ff703ab6f5d0d6c8cafbe09b2e"} Nov 26 11:28:29 crc kubenswrapper[4622]: I1126 11:28:29.463141 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.683175044 podStartE2EDuration="4.463122063s" podCreationTimestamp="2025-11-26 11:28:25 +0000 UTC" firstStartedPulling="2025-11-26 11:28:26.228382292 +0000 UTC m=+1065.819593813" lastFinishedPulling="2025-11-26 11:28:29.00832931 +0000 UTC m=+1068.599540832" observedRunningTime="2025-11-26 11:28:29.459821855 +0000 UTC m=+1069.051033378" watchObservedRunningTime="2025-11-26 11:28:29.463122063 +0000 UTC m=+1069.054333584" Nov 26 11:28:29 crc kubenswrapper[4622]: I1126 11:28:29.481918 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.8817081249999998 podStartE2EDuration="4.481899843s" podCreationTimestamp="2025-11-26 11:28:25 +0000 UTC" firstStartedPulling="2025-11-26 11:28:26.408670277 +0000 UTC m=+1065.999881799" lastFinishedPulling="2025-11-26 11:28:29.008861995 +0000 UTC m=+1068.600073517" observedRunningTime="2025-11-26 11:28:29.476216059 +0000 UTC m=+1069.067427581" watchObservedRunningTime="2025-11-26 11:28:29.481899843 +0000 UTC m=+1069.073111366" Nov 26 11:28:29 crc kubenswrapper[4622]: I1126 11:28:29.492526 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.9977743540000001 podStartE2EDuration="4.492493062s" podCreationTimestamp="2025-11-26 11:28:25 +0000 UTC" firstStartedPulling="2025-11-26 11:28:26.514977552 +0000 UTC m=+1066.106189064" lastFinishedPulling="2025-11-26 11:28:29.009696249 +0000 UTC m=+1068.600907772" observedRunningTime="2025-11-26 11:28:29.488021084 +0000 UTC m=+1069.079232605" watchObservedRunningTime="2025-11-26 11:28:29.492493062 +0000 UTC m=+1069.083704584" Nov 26 11:28:29 crc kubenswrapper[4622]: I1126 11:28:29.506354 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.012209971 podStartE2EDuration="4.50633533s" podCreationTimestamp="2025-11-26 11:28:25 +0000 UTC" firstStartedPulling="2025-11-26 11:28:26.518787922 +0000 UTC m=+1066.109999443" lastFinishedPulling="2025-11-26 11:28:29.012913279 +0000 UTC m=+1068.604124802" observedRunningTime="2025-11-26 11:28:29.505040767 +0000 UTC m=+1069.096252289" watchObservedRunningTime="2025-11-26 11:28:29.50633533 +0000 UTC m=+1069.097546852" Nov 26 11:28:30 crc kubenswrapper[4622]: I1126 11:28:30.459795 4622 generic.go:334] "Generic (PLEG): container finished" podID="f818b7cc-2b84-4bff-8c7e-e51a240179df" containerID="cc1b0d51a08e90923d14e8fd416207a1bd85e357b11d03489b66d7f6b9da6ac8" exitCode=143 Nov 26 11:28:30 crc kubenswrapper[4622]: I1126 11:28:30.459940 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f818b7cc-2b84-4bff-8c7e-e51a240179df","Type":"ContainerStarted","Data":"35e7aa4109463f397bed20869781e2958b160f7927640379ce4a2e49129deca7"} Nov 26 11:28:30 crc kubenswrapper[4622]: I1126 11:28:30.459981 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f818b7cc-2b84-4bff-8c7e-e51a240179df","Type":"ContainerDied","Data":"cc1b0d51a08e90923d14e8fd416207a1bd85e357b11d03489b66d7f6b9da6ac8"} Nov 26 11:28:30 crc kubenswrapper[4622]: I1126 11:28:30.892620 4622 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 26 11:28:30 crc kubenswrapper[4622]: I1126 11:28:30.932447 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 26 11:28:30 crc kubenswrapper[4622]: I1126 11:28:30.932542 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 26 11:28:31 crc kubenswrapper[4622]: I1126 11:28:31.035249 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:28:31 crc kubenswrapper[4622]: I1126 11:28:31.476820 4622 generic.go:334] "Generic (PLEG): container finished" podID="497319a3-f556-4fbc-8c50-66ddff723bda" containerID="625a3515b6940cdf5cb533148aa8bc8f493bf72916d2d1077d8560ad3e012c31" exitCode=0 Nov 26 11:28:31 crc kubenswrapper[4622]: I1126 11:28:31.478079 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-nnbm5" event={"ID":"497319a3-f556-4fbc-8c50-66ddff723bda","Type":"ContainerDied","Data":"625a3515b6940cdf5cb533148aa8bc8f493bf72916d2d1077d8560ad3e012c31"} Nov 26 11:28:32 crc kubenswrapper[4622]: I1126 11:28:32.856008 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:32 crc kubenswrapper[4622]: I1126 11:28:32.995193 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-combined-ca-bundle\") pod \"497319a3-f556-4fbc-8c50-66ddff723bda\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " Nov 26 11:28:32 crc kubenswrapper[4622]: I1126 11:28:32.995324 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjd8j\" (UniqueName: \"kubernetes.io/projected/497319a3-f556-4fbc-8c50-66ddff723bda-kube-api-access-tjd8j\") pod \"497319a3-f556-4fbc-8c50-66ddff723bda\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " Nov 26 11:28:32 crc kubenswrapper[4622]: I1126 11:28:32.995372 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-scripts\") pod \"497319a3-f556-4fbc-8c50-66ddff723bda\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " Nov 26 11:28:32 crc kubenswrapper[4622]: I1126 11:28:32.995416 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-config-data\") pod \"497319a3-f556-4fbc-8c50-66ddff723bda\" (UID: \"497319a3-f556-4fbc-8c50-66ddff723bda\") " Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.001928 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/497319a3-f556-4fbc-8c50-66ddff723bda-kube-api-access-tjd8j" (OuterVolumeSpecName: "kube-api-access-tjd8j") pod "497319a3-f556-4fbc-8c50-66ddff723bda" (UID: "497319a3-f556-4fbc-8c50-66ddff723bda"). InnerVolumeSpecName "kube-api-access-tjd8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.002410 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-scripts" (OuterVolumeSpecName: "scripts") pod "497319a3-f556-4fbc-8c50-66ddff723bda" (UID: "497319a3-f556-4fbc-8c50-66ddff723bda"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.019970 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "497319a3-f556-4fbc-8c50-66ddff723bda" (UID: "497319a3-f556-4fbc-8c50-66ddff723bda"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.020790 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-config-data" (OuterVolumeSpecName: "config-data") pod "497319a3-f556-4fbc-8c50-66ddff723bda" (UID: "497319a3-f556-4fbc-8c50-66ddff723bda"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.097942 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.097980 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjd8j\" (UniqueName: \"kubernetes.io/projected/497319a3-f556-4fbc-8c50-66ddff723bda-kube-api-access-tjd8j\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.097996 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.098005 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/497319a3-f556-4fbc-8c50-66ddff723bda-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.497885 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nnbm5" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.497893 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-nnbm5" event={"ID":"497319a3-f556-4fbc-8c50-66ddff723bda","Type":"ContainerDied","Data":"4551f9e798e904db23c1ed401f4a4d691b52ca14e84b734d24aaea14efeee966"} Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.498517 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4551f9e798e904db23c1ed401f4a4d691b52ca14e84b734d24aaea14efeee966" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.499792 4622 generic.go:334] "Generic (PLEG): container finished" podID="c98829f1-bc94-4867-9c57-2caadb2ae3ae" containerID="6fba37a5e02e65b15833f2f028dd820de9a60d7acd88c747abf6a665b4164c02" exitCode=0 Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.499828 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w8f5q" event={"ID":"c98829f1-bc94-4867-9c57-2caadb2ae3ae","Type":"ContainerDied","Data":"6fba37a5e02e65b15833f2f028dd820de9a60d7acd88c747abf6a665b4164c02"} Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.581550 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 26 11:28:33 crc kubenswrapper[4622]: E1126 11:28:33.582242 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="497319a3-f556-4fbc-8c50-66ddff723bda" containerName="nova-cell1-conductor-db-sync" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.582270 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="497319a3-f556-4fbc-8c50-66ddff723bda" containerName="nova-cell1-conductor-db-sync" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.582582 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="497319a3-f556-4fbc-8c50-66ddff723bda" containerName="nova-cell1-conductor-db-sync" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.583368 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.587448 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.591328 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.713725 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/851f7097-5531-4bf6-9768-2c1cd70db989-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"851f7097-5531-4bf6-9768-2c1cd70db989\") " pod="openstack/nova-cell1-conductor-0" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.713830 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qznr5\" (UniqueName: \"kubernetes.io/projected/851f7097-5531-4bf6-9768-2c1cd70db989-kube-api-access-qznr5\") pod \"nova-cell1-conductor-0\" (UID: \"851f7097-5531-4bf6-9768-2c1cd70db989\") " pod="openstack/nova-cell1-conductor-0" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.714026 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/851f7097-5531-4bf6-9768-2c1cd70db989-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"851f7097-5531-4bf6-9768-2c1cd70db989\") " pod="openstack/nova-cell1-conductor-0" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.818831 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/851f7097-5531-4bf6-9768-2c1cd70db989-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"851f7097-5531-4bf6-9768-2c1cd70db989\") " pod="openstack/nova-cell1-conductor-0" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.819218 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/851f7097-5531-4bf6-9768-2c1cd70db989-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"851f7097-5531-4bf6-9768-2c1cd70db989\") " pod="openstack/nova-cell1-conductor-0" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.819442 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qznr5\" (UniqueName: \"kubernetes.io/projected/851f7097-5531-4bf6-9768-2c1cd70db989-kube-api-access-qznr5\") pod \"nova-cell1-conductor-0\" (UID: \"851f7097-5531-4bf6-9768-2c1cd70db989\") " pod="openstack/nova-cell1-conductor-0" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.827059 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/851f7097-5531-4bf6-9768-2c1cd70db989-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"851f7097-5531-4bf6-9768-2c1cd70db989\") " pod="openstack/nova-cell1-conductor-0" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.828034 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/851f7097-5531-4bf6-9768-2c1cd70db989-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"851f7097-5531-4bf6-9768-2c1cd70db989\") " pod="openstack/nova-cell1-conductor-0" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.849281 4622 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qznr5\" (UniqueName: \"kubernetes.io/projected/851f7097-5531-4bf6-9768-2c1cd70db989-kube-api-access-qznr5\") pod \"nova-cell1-conductor-0\" (UID: \"851f7097-5531-4bf6-9768-2c1cd70db989\") " pod="openstack/nova-cell1-conductor-0" Nov 26 11:28:33 crc kubenswrapper[4622]: I1126 11:28:33.902329 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 26 11:28:34 crc kubenswrapper[4622]: I1126 11:28:34.294851 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 26 11:28:34 crc kubenswrapper[4622]: I1126 11:28:34.515558 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"851f7097-5531-4bf6-9768-2c1cd70db989","Type":"ContainerStarted","Data":"e6e4d0192f0b27ad37c3432494728324e8de69b5ba61ee109d7f0b7f63b4c455"} Nov 26 11:28:34 crc kubenswrapper[4622]: I1126 11:28:34.515833 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"851f7097-5531-4bf6-9768-2c1cd70db989","Type":"ContainerStarted","Data":"fc7e2d4e657eb697a4183417b7930e1ddca9c639a91ff14f9f2b8516c3bd2da1"} Nov 26 11:28:34 crc kubenswrapper[4622]: I1126 11:28:34.539103 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=1.539085117 podStartE2EDuration="1.539085117s" podCreationTimestamp="2025-11-26 11:28:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:28:34.533799473 +0000 UTC m=+1074.125010995" watchObservedRunningTime="2025-11-26 11:28:34.539085117 +0000 UTC m=+1074.130296640" Nov 26 11:28:34 crc kubenswrapper[4622]: I1126 11:28:34.790546 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 26 11:28:34 crc kubenswrapper[4622]: I1126 11:28:34.947160 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-config-data\") pod \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " Nov 26 11:28:34 crc kubenswrapper[4622]: I1126 11:28:34.948100 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmnf9\" (UniqueName: \"kubernetes.io/projected/c98829f1-bc94-4867-9c57-2caadb2ae3ae-kube-api-access-hmnf9\") pod \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " Nov 26 11:28:34 crc kubenswrapper[4622]: I1126 11:28:34.948211 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-combined-ca-bundle\") pod \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " Nov 26 11:28:34 crc kubenswrapper[4622]: I1126 11:28:34.948372 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-scripts\") pod \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\" (UID: \"c98829f1-bc94-4867-9c57-2caadb2ae3ae\") " Nov 26 11:28:34 crc kubenswrapper[4622]: I1126 11:28:34.970749 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c98829f1-bc94-4867-9c57-2caadb2ae3ae-kube-api-access-hmnf9" (OuterVolumeSpecName: "kube-api-access-hmnf9") pod "c98829f1-bc94-4867-9c57-2caadb2ae3ae" (UID: "c98829f1-bc94-4867-9c57-2caadb2ae3ae"). InnerVolumeSpecName "kube-api-access-hmnf9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:28:34 crc kubenswrapper[4622]: I1126 11:28:34.981640 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-scripts" (OuterVolumeSpecName: "scripts") pod "c98829f1-bc94-4867-9c57-2caadb2ae3ae" (UID: "c98829f1-bc94-4867-9c57-2caadb2ae3ae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:35 crc kubenswrapper[4622]: I1126 11:28:35.003610 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c98829f1-bc94-4867-9c57-2caadb2ae3ae" (UID: "c98829f1-bc94-4867-9c57-2caadb2ae3ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:35 crc kubenswrapper[4622]: I1126 11:28:35.006711 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-config-data" (OuterVolumeSpecName: "config-data") pod "c98829f1-bc94-4867-9c57-2caadb2ae3ae" (UID: "c98829f1-bc94-4867-9c57-2caadb2ae3ae"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:35 crc kubenswrapper[4622]: I1126 11:28:35.054132 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:35 crc kubenswrapper[4622]: I1126 11:28:35.054245 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:35 crc kubenswrapper[4622]: I1126 11:28:35.054316 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmnf9\" (UniqueName: \"kubernetes.io/projected/c98829f1-bc94-4867-9c57-2caadb2ae3ae-kube-api-access-hmnf9\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:35 crc kubenswrapper[4622]: I1126 11:28:35.054385 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c98829f1-bc94-4867-9c57-2caadb2ae3ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:35 crc kubenswrapper[4622]: I1126 11:28:35.525599 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w8f5q" Nov 26 11:28:35 crc kubenswrapper[4622]: I1126 11:28:35.525569 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w8f5q" event={"ID":"c98829f1-bc94-4867-9c57-2caadb2ae3ae","Type":"ContainerDied","Data":"85574ab8f3e8954e3bd2a93b5b5986f1b79f43e9c264dfdf0f141adc78467648"} Nov 26 11:28:35 crc kubenswrapper[4622]: I1126 11:28:35.525769 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85574ab8f3e8954e3bd2a93b5b5986f1b79f43e9c264dfdf0f141adc78467648" Nov 26 11:28:35 crc kubenswrapper[4622]: I1126 11:28:35.525825 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 26 11:28:35 crc kubenswrapper[4622]: I1126 11:28:35.705959 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 26 11:28:35 crc kubenswrapper[4622]: I1126 11:28:35.706236 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="13e08c44-f453-4fc7-945f-583a722ab377" containerName="nova-api-api" containerID="cri-o://3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a" gracePeriod=30 Nov 26 11:28:35 crc kubenswrapper[4622]: I1126 11:28:35.706183 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="13e08c44-f453-4fc7-945f-583a722ab377" containerName="nova-api-log" containerID="cri-o://2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29" gracePeriod=30 Nov 26 11:28:35 crc kubenswrapper[4622]: I1126 11:28:35.721520 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 26 11:28:35 crc kubenswrapper[4622]: I1126 11:28:35.721884 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="bbb4e34f-697b-4447-984a-78933d18f3c5" containerName="nova-scheduler-scheduler" containerID="cri-o://481ad87f697ee9d001eb3ff5a399ff0657c47fd9bc9897fbf3ef2586f2066da6" gracePeriod=30 Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.076669 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" Nov 26 11:28:36 crc 
kubenswrapper[4622]: I1126 11:28:36.132691 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-775457b975-fbhtg"] Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.132966 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-775457b975-fbhtg" podUID="c506ee05-4c61-48c9-a61d-2a641b387abd" containerName="dnsmasq-dns" containerID="cri-o://0c38a8a7f7d9958ccd9440d88525c72312c8275c676617163374cfb72bde4190" gracePeriod=10 Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.411453 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.486397 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxdmg\" (UniqueName: \"kubernetes.io/projected/13e08c44-f453-4fc7-945f-583a722ab377-kube-api-access-dxdmg\") pod \"13e08c44-f453-4fc7-945f-583a722ab377\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.486449 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13e08c44-f453-4fc7-945f-583a722ab377-config-data\") pod \"13e08c44-f453-4fc7-945f-583a722ab377\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.486674 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13e08c44-f453-4fc7-945f-583a722ab377-combined-ca-bundle\") pod \"13e08c44-f453-4fc7-945f-583a722ab377\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.486794 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13e08c44-f453-4fc7-945f-583a722ab377-logs\") pod \"13e08c44-f453-4fc7-945f-583a722ab377\" (UID: \"13e08c44-f453-4fc7-945f-583a722ab377\") " Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.487520 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13e08c44-f453-4fc7-945f-583a722ab377-logs" (OuterVolumeSpecName: "logs") pod "13e08c44-f453-4fc7-945f-583a722ab377" (UID: "13e08c44-f453-4fc7-945f-583a722ab377"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.492222 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13e08c44-f453-4fc7-945f-583a722ab377-kube-api-access-dxdmg" (OuterVolumeSpecName: "kube-api-access-dxdmg") pod "13e08c44-f453-4fc7-945f-583a722ab377" (UID: "13e08c44-f453-4fc7-945f-583a722ab377"). InnerVolumeSpecName "kube-api-access-dxdmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.521757 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13e08c44-f453-4fc7-945f-583a722ab377-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13e08c44-f453-4fc7-945f-583a722ab377" (UID: "13e08c44-f453-4fc7-945f-583a722ab377"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.532080 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13e08c44-f453-4fc7-945f-583a722ab377-config-data" (OuterVolumeSpecName: "config-data") pod "13e08c44-f453-4fc7-945f-583a722ab377" (UID: "13e08c44-f453-4fc7-945f-583a722ab377"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.543310 4622 generic.go:334] "Generic (PLEG): container finished" podID="bbb4e34f-697b-4447-984a-78933d18f3c5" containerID="481ad87f697ee9d001eb3ff5a399ff0657c47fd9bc9897fbf3ef2586f2066da6" exitCode=0 Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.543371 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bbb4e34f-697b-4447-984a-78933d18f3c5","Type":"ContainerDied","Data":"481ad87f697ee9d001eb3ff5a399ff0657c47fd9bc9897fbf3ef2586f2066da6"} Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.563179 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.563213 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-775457b975-fbhtg" event={"ID":"c506ee05-4c61-48c9-a61d-2a641b387abd","Type":"ContainerDied","Data":"0c38a8a7f7d9958ccd9440d88525c72312c8275c676617163374cfb72bde4190"} Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.563178 4622 generic.go:334] "Generic (PLEG): container finished" podID="c506ee05-4c61-48c9-a61d-2a641b387abd" containerID="0c38a8a7f7d9958ccd9440d88525c72312c8275c676617163374cfb72bde4190" exitCode=0 Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.573255 4622 generic.go:334] "Generic (PLEG): container finished" podID="13e08c44-f453-4fc7-945f-583a722ab377" containerID="3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a" exitCode=0 Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.573302 4622 generic.go:334] "Generic (PLEG): container finished" podID="13e08c44-f453-4fc7-945f-583a722ab377" containerID="2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29" exitCode=143 Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.574437 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.574466 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"13e08c44-f453-4fc7-945f-583a722ab377","Type":"ContainerDied","Data":"3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a"} Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.574536 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"13e08c44-f453-4fc7-945f-583a722ab377","Type":"ContainerDied","Data":"2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29"} Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.574550 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"13e08c44-f453-4fc7-945f-583a722ab377","Type":"ContainerDied","Data":"f93da95d530c36b4321332d9d52d3799844d72054d3634b3d0fc34e1df14c39e"} Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.574568 4622 scope.go:117] "RemoveContainer" containerID="3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.589062 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxdmg\" (UniqueName: \"kubernetes.io/projected/13e08c44-f453-4fc7-945f-583a722ab377-kube-api-access-dxdmg\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.589092 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13e08c44-f453-4fc7-945f-583a722ab377-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.589103 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13e08c44-f453-4fc7-945f-583a722ab377-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.589111 4622 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13e08c44-f453-4fc7-945f-583a722ab377-logs\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.608554 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.612167 4622 scope.go:117] "RemoveContainer" containerID="2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.618229 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.633166 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.647693 4622 scope.go:117] "RemoveContainer" containerID="3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a" Nov 26 11:28:36 crc kubenswrapper[4622]: E1126 11:28:36.655770 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a\": container with ID starting with 3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a not found: ID does not exist" containerID="3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.655820 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a"} err="failed to get container status \"3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a\": rpc error: code = NotFound desc = could not find container \"3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a\": container with ID starting with 3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a not found: ID does not exist" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.655855 4622 scope.go:117] "RemoveContainer" containerID="2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29" Nov 26 11:28:36 crc kubenswrapper[4622]: E1126 11:28:36.656411 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29\": container with ID starting with 2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29 not found: ID does not exist" containerID="2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.656512 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29"} err="failed to get container status \"2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29\": rpc error: code = NotFound desc = could not find container \"2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29\": container with ID starting with 2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29 not found: ID does not exist" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.656580 4622 scope.go:117] "RemoveContainer" containerID="3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.659652 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 26 11:28:36 crc kubenswrapper[4622]: E1126 11:28:36.660664 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13e08c44-f453-4fc7-945f-583a722ab377" containerName="nova-api-api" Nov 26 11:28:36 
crc kubenswrapper[4622]: I1126 11:28:36.660691 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="13e08c44-f453-4fc7-945f-583a722ab377" containerName="nova-api-api" Nov 26 11:28:36 crc kubenswrapper[4622]: E1126 11:28:36.660719 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c506ee05-4c61-48c9-a61d-2a641b387abd" containerName="dnsmasq-dns" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.660727 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="c506ee05-4c61-48c9-a61d-2a641b387abd" containerName="dnsmasq-dns" Nov 26 11:28:36 crc kubenswrapper[4622]: E1126 11:28:36.660747 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c98829f1-bc94-4867-9c57-2caadb2ae3ae" containerName="nova-manage" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.660753 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="c98829f1-bc94-4867-9c57-2caadb2ae3ae" containerName="nova-manage" Nov 26 11:28:36 crc kubenswrapper[4622]: E1126 11:28:36.660767 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c506ee05-4c61-48c9-a61d-2a641b387abd" containerName="init" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.660776 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="c506ee05-4c61-48c9-a61d-2a641b387abd" containerName="init" Nov 26 11:28:36 crc kubenswrapper[4622]: E1126 11:28:36.660796 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13e08c44-f453-4fc7-945f-583a722ab377" containerName="nova-api-log" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.660804 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="13e08c44-f453-4fc7-945f-583a722ab377" containerName="nova-api-log" Nov 26 11:28:36 crc kubenswrapper[4622]: E1126 11:28:36.660837 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbb4e34f-697b-4447-984a-78933d18f3c5" containerName="nova-scheduler-scheduler" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.660844 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbb4e34f-697b-4447-984a-78933d18f3c5" containerName="nova-scheduler-scheduler" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.661303 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbb4e34f-697b-4447-984a-78933d18f3c5" containerName="nova-scheduler-scheduler" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.661335 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="13e08c44-f453-4fc7-945f-583a722ab377" containerName="nova-api-log" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.661355 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="c98829f1-bc94-4867-9c57-2caadb2ae3ae" containerName="nova-manage" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.661377 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="13e08c44-f453-4fc7-945f-583a722ab377" containerName="nova-api-api" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.661389 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="c506ee05-4c61-48c9-a61d-2a641b387abd" containerName="dnsmasq-dns" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.663350 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.664020 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a"} err="failed to get container status \"3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a\": rpc error: code = NotFound desc = could not find container \"3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a\": container with ID starting with 3f5648dd0a8223fde7aa70ab1d0bc002bfa2128acffc496b3cb5866fe4ed1d5a not found: ID does not exist" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.664081 4622 scope.go:117] "RemoveContainer" containerID="2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.666195 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29"} err="failed to get container status \"2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29\": rpc error: code = NotFound desc = could not find container \"2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29\": container with ID starting with 2ffe8e9cf920222b93f4744070428efa4d8a3901bc015d04aec3b938aab0ff29 not found: ID does not exist" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.668525 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.672414 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.690133 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-ovsdbserver-nb\") pod \"c506ee05-4c61-48c9-a61d-2a641b387abd\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.690183 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nd7mx\" (UniqueName: \"kubernetes.io/projected/c506ee05-4c61-48c9-a61d-2a641b387abd-kube-api-access-nd7mx\") pod \"c506ee05-4c61-48c9-a61d-2a641b387abd\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.690240 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-ovsdbserver-sb\") pod \"c506ee05-4c61-48c9-a61d-2a641b387abd\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.690310 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-dns-svc\") pod \"c506ee05-4c61-48c9-a61d-2a641b387abd\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.690380 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb4e34f-697b-4447-984a-78933d18f3c5-config-data\") pod \"bbb4e34f-697b-4447-984a-78933d18f3c5\" (UID: \"bbb4e34f-697b-4447-984a-78933d18f3c5\") " Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.690448 
4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-config\") pod \"c506ee05-4c61-48c9-a61d-2a641b387abd\" (UID: \"c506ee05-4c61-48c9-a61d-2a641b387abd\") " Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.690566 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mh7hm\" (UniqueName: \"kubernetes.io/projected/bbb4e34f-697b-4447-984a-78933d18f3c5-kube-api-access-mh7hm\") pod \"bbb4e34f-697b-4447-984a-78933d18f3c5\" (UID: \"bbb4e34f-697b-4447-984a-78933d18f3c5\") " Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.690593 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb4e34f-697b-4447-984a-78933d18f3c5-combined-ca-bundle\") pod \"bbb4e34f-697b-4447-984a-78933d18f3c5\" (UID: \"bbb4e34f-697b-4447-984a-78933d18f3c5\") " Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.704103 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbb4e34f-697b-4447-984a-78933d18f3c5-kube-api-access-mh7hm" (OuterVolumeSpecName: "kube-api-access-mh7hm") pod "bbb4e34f-697b-4447-984a-78933d18f3c5" (UID: "bbb4e34f-697b-4447-984a-78933d18f3c5"). InnerVolumeSpecName "kube-api-access-mh7hm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.704142 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c506ee05-4c61-48c9-a61d-2a641b387abd-kube-api-access-nd7mx" (OuterVolumeSpecName: "kube-api-access-nd7mx") pod "c506ee05-4c61-48c9-a61d-2a641b387abd" (UID: "c506ee05-4c61-48c9-a61d-2a641b387abd"). InnerVolumeSpecName "kube-api-access-nd7mx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.711204 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbb4e34f-697b-4447-984a-78933d18f3c5-config-data" (OuterVolumeSpecName: "config-data") pod "bbb4e34f-697b-4447-984a-78933d18f3c5" (UID: "bbb4e34f-697b-4447-984a-78933d18f3c5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.715139 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13e08c44-f453-4fc7-945f-583a722ab377" path="/var/lib/kubelet/pods/13e08c44-f453-4fc7-945f-583a722ab377/volumes" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.726136 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbb4e34f-697b-4447-984a-78933d18f3c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bbb4e34f-697b-4447-984a-78933d18f3c5" (UID: "bbb4e34f-697b-4447-984a-78933d18f3c5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.729451 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c506ee05-4c61-48c9-a61d-2a641b387abd" (UID: "c506ee05-4c61-48c9-a61d-2a641b387abd"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.730475 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c506ee05-4c61-48c9-a61d-2a641b387abd" (UID: "c506ee05-4c61-48c9-a61d-2a641b387abd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.730697 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-config" (OuterVolumeSpecName: "config") pod "c506ee05-4c61-48c9-a61d-2a641b387abd" (UID: "c506ee05-4c61-48c9-a61d-2a641b387abd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.734702 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c506ee05-4c61-48c9-a61d-2a641b387abd" (UID: "c506ee05-4c61-48c9-a61d-2a641b387abd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.794382 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c44e56-3ca0-48fb-8f03-235fee59fa3d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " pod="openstack/nova-api-0" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.794439 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svgg8\" (UniqueName: \"kubernetes.io/projected/62c44e56-3ca0-48fb-8f03-235fee59fa3d-kube-api-access-svgg8\") pod \"nova-api-0\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " pod="openstack/nova-api-0" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.794474 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62c44e56-3ca0-48fb-8f03-235fee59fa3d-logs\") pod \"nova-api-0\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " pod="openstack/nova-api-0" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.794744 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62c44e56-3ca0-48fb-8f03-235fee59fa3d-config-data\") pod \"nova-api-0\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " pod="openstack/nova-api-0" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.794868 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.794913 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mh7hm\" (UniqueName: \"kubernetes.io/projected/bbb4e34f-697b-4447-984a-78933d18f3c5-kube-api-access-mh7hm\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.794933 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/bbb4e34f-697b-4447-984a-78933d18f3c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.794945 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.794957 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nd7mx\" (UniqueName: \"kubernetes.io/projected/c506ee05-4c61-48c9-a61d-2a641b387abd-kube-api-access-nd7mx\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.794969 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.794982 4622 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c506ee05-4c61-48c9-a61d-2a641b387abd-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.794995 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb4e34f-697b-4447-984a-78933d18f3c5-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.897573 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c44e56-3ca0-48fb-8f03-235fee59fa3d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " pod="openstack/nova-api-0" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.897626 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svgg8\" (UniqueName: \"kubernetes.io/projected/62c44e56-3ca0-48fb-8f03-235fee59fa3d-kube-api-access-svgg8\") pod \"nova-api-0\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " pod="openstack/nova-api-0" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.897654 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62c44e56-3ca0-48fb-8f03-235fee59fa3d-logs\") pod \"nova-api-0\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " pod="openstack/nova-api-0" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.897758 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62c44e56-3ca0-48fb-8f03-235fee59fa3d-config-data\") pod \"nova-api-0\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " pod="openstack/nova-api-0" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.898481 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62c44e56-3ca0-48fb-8f03-235fee59fa3d-logs\") pod \"nova-api-0\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " pod="openstack/nova-api-0" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.901720 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62c44e56-3ca0-48fb-8f03-235fee59fa3d-config-data\") pod \"nova-api-0\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " pod="openstack/nova-api-0" Nov 26 11:28:36 crc kubenswrapper[4622]: 
I1126 11:28:36.902475 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c44e56-3ca0-48fb-8f03-235fee59fa3d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " pod="openstack/nova-api-0" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.913841 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svgg8\" (UniqueName: \"kubernetes.io/projected/62c44e56-3ca0-48fb-8f03-235fee59fa3d-kube-api-access-svgg8\") pod \"nova-api-0\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " pod="openstack/nova-api-0" Nov 26 11:28:36 crc kubenswrapper[4622]: I1126 11:28:36.983345 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.385439 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 26 11:28:37 crc kubenswrapper[4622]: W1126 11:28:37.388904 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62c44e56_3ca0_48fb_8f03_235fee59fa3d.slice/crio-f64712c808e60a9ca971d2b46ed1e079b0652e6d3b924be878644e13e26fc2b7 WatchSource:0}: Error finding container f64712c808e60a9ca971d2b46ed1e079b0652e6d3b924be878644e13e26fc2b7: Status 404 returned error can't find the container with id f64712c808e60a9ca971d2b46ed1e079b0652e6d3b924be878644e13e26fc2b7 Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.583455 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"62c44e56-3ca0-48fb-8f03-235fee59fa3d","Type":"ContainerStarted","Data":"125086f1fc78a7638449f1a7857998f588221cbc1845e567094c03c8b339c198"} Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.583552 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"62c44e56-3ca0-48fb-8f03-235fee59fa3d","Type":"ContainerStarted","Data":"f64712c808e60a9ca971d2b46ed1e079b0652e6d3b924be878644e13e26fc2b7"} Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.585579 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bbb4e34f-697b-4447-984a-78933d18f3c5","Type":"ContainerDied","Data":"627ca6be8eddd8938d0521554b45ea4f6a0cf85ae7c94071b05459e168c10203"} Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.585617 4622 scope.go:117] "RemoveContainer" containerID="481ad87f697ee9d001eb3ff5a399ff0657c47fd9bc9897fbf3ef2586f2066da6" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.585765 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.588765 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-775457b975-fbhtg" event={"ID":"c506ee05-4c61-48c9-a61d-2a641b387abd","Type":"ContainerDied","Data":"4043a34847a6d241ccf89f27b78cfff405953bc07e0ac8df93284014dc8676c8"} Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.588853 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-775457b975-fbhtg" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.618400 4622 scope.go:117] "RemoveContainer" containerID="0c38a8a7f7d9958ccd9440d88525c72312c8275c676617163374cfb72bde4190" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.636981 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.645142 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.648809 4622 scope.go:117] "RemoveContainer" containerID="5b85fb6ac47bc65852b765ba1bc2af5008dd54b67cfee029150e396654fb8596" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.655076 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.656708 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.659128 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.659870 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-775457b975-fbhtg"] Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.665126 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-775457b975-fbhtg"] Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.670551 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.821258 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9wqn\" (UniqueName: \"kubernetes.io/projected/e0264e46-87bd-4327-a62b-a71c1af8c806-kube-api-access-z9wqn\") pod \"nova-scheduler-0\" (UID: \"e0264e46-87bd-4327-a62b-a71c1af8c806\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.821350 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0264e46-87bd-4327-a62b-a71c1af8c806-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e0264e46-87bd-4327-a62b-a71c1af8c806\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.821395 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0264e46-87bd-4327-a62b-a71c1af8c806-config-data\") pod \"nova-scheduler-0\" (UID: \"e0264e46-87bd-4327-a62b-a71c1af8c806\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.923869 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0264e46-87bd-4327-a62b-a71c1af8c806-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e0264e46-87bd-4327-a62b-a71c1af8c806\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.923962 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0264e46-87bd-4327-a62b-a71c1af8c806-config-data\") pod \"nova-scheduler-0\" (UID: 
\"e0264e46-87bd-4327-a62b-a71c1af8c806\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.924159 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9wqn\" (UniqueName: \"kubernetes.io/projected/e0264e46-87bd-4327-a62b-a71c1af8c806-kube-api-access-z9wqn\") pod \"nova-scheduler-0\" (UID: \"e0264e46-87bd-4327-a62b-a71c1af8c806\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.929594 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0264e46-87bd-4327-a62b-a71c1af8c806-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e0264e46-87bd-4327-a62b-a71c1af8c806\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.929683 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0264e46-87bd-4327-a62b-a71c1af8c806-config-data\") pod \"nova-scheduler-0\" (UID: \"e0264e46-87bd-4327-a62b-a71c1af8c806\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.939227 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9wqn\" (UniqueName: \"kubernetes.io/projected/e0264e46-87bd-4327-a62b-a71c1af8c806-kube-api-access-z9wqn\") pod \"nova-scheduler-0\" (UID: \"e0264e46-87bd-4327-a62b-a71c1af8c806\") " pod="openstack/nova-scheduler-0" Nov 26 11:28:37 crc kubenswrapper[4622]: I1126 11:28:37.977546 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 26 11:28:38 crc kubenswrapper[4622]: I1126 11:28:38.453830 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 26 11:28:38 crc kubenswrapper[4622]: W1126 11:28:38.458138 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0264e46_87bd_4327_a62b_a71c1af8c806.slice/crio-3815e045e6d7d46cbb9c648cfc62220742c76b87697cc6756ccd37acba5b6e50 WatchSource:0}: Error finding container 3815e045e6d7d46cbb9c648cfc62220742c76b87697cc6756ccd37acba5b6e50: Status 404 returned error can't find the container with id 3815e045e6d7d46cbb9c648cfc62220742c76b87697cc6756ccd37acba5b6e50 Nov 26 11:28:38 crc kubenswrapper[4622]: I1126 11:28:38.608028 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"62c44e56-3ca0-48fb-8f03-235fee59fa3d","Type":"ContainerStarted","Data":"35b83b184e4f5a9eb935aa0cb478caf407e40d0cd0996f19e19010f0c7476159"} Nov 26 11:28:38 crc kubenswrapper[4622]: I1126 11:28:38.613326 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e0264e46-87bd-4327-a62b-a71c1af8c806","Type":"ContainerStarted","Data":"3815e045e6d7d46cbb9c648cfc62220742c76b87697cc6756ccd37acba5b6e50"} Nov 26 11:28:38 crc kubenswrapper[4622]: I1126 11:28:38.624662 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.624642279 podStartE2EDuration="2.624642279s" podCreationTimestamp="2025-11-26 11:28:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:28:38.623572269 +0000 UTC m=+1078.214783802" watchObservedRunningTime="2025-11-26 11:28:38.624642279 +0000 UTC m=+1078.215853801" 
Nov 26 11:28:38 crc kubenswrapper[4622]: I1126 11:28:38.718521 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbb4e34f-697b-4447-984a-78933d18f3c5" path="/var/lib/kubelet/pods/bbb4e34f-697b-4447-984a-78933d18f3c5/volumes" Nov 26 11:28:38 crc kubenswrapper[4622]: I1126 11:28:38.719382 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c506ee05-4c61-48c9-a61d-2a641b387abd" path="/var/lib/kubelet/pods/c506ee05-4c61-48c9-a61d-2a641b387abd/volumes" Nov 26 11:28:39 crc kubenswrapper[4622]: I1126 11:28:39.631184 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e0264e46-87bd-4327-a62b-a71c1af8c806","Type":"ContainerStarted","Data":"24c8df9b9b22ded5c1f06aea78445cf4bbe7e128044207264dd8728949284fd9"} Nov 26 11:28:42 crc kubenswrapper[4622]: I1126 11:28:42.978922 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 26 11:28:43 crc kubenswrapper[4622]: I1126 11:28:43.924927 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Nov 26 11:28:43 crc kubenswrapper[4622]: I1126 11:28:43.940052 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=6.940028257 podStartE2EDuration="6.940028257s" podCreationTimestamp="2025-11-26 11:28:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:28:39.661861761 +0000 UTC m=+1079.253073283" watchObservedRunningTime="2025-11-26 11:28:43.940028257 +0000 UTC m=+1083.531239778" Nov 26 11:28:45 crc kubenswrapper[4622]: I1126 11:28:45.199161 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:28:45 crc kubenswrapper[4622]: I1126 11:28:45.199562 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:28:46 crc kubenswrapper[4622]: I1126 11:28:46.984192 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 26 11:28:46 crc kubenswrapper[4622]: I1126 11:28:46.984275 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 26 11:28:47 crc kubenswrapper[4622]: I1126 11:28:47.956454 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 26 11:28:47 crc kubenswrapper[4622]: I1126 11:28:47.981741 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 26 11:28:48 crc kubenswrapper[4622]: I1126 11:28:48.013886 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 26 11:28:48 crc kubenswrapper[4622]: I1126 11:28:48.067658 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="62c44e56-3ca0-48fb-8f03-235fee59fa3d" containerName="nova-api-log" probeResult="failure" output="Get 
\"http://10.217.0.180:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 26 11:28:48 crc kubenswrapper[4622]: I1126 11:28:48.067725 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="62c44e56-3ca0-48fb-8f03-235fee59fa3d" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.180:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 26 11:28:48 crc kubenswrapper[4622]: I1126 11:28:48.723277 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 26 11:28:49 crc kubenswrapper[4622]: I1126 11:28:49.689753 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 26 11:28:49 crc kubenswrapper[4622]: I1126 11:28:49.689961 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="20655504-f2d9-4a76-b534-b479e7660957" containerName="kube-state-metrics" containerID="cri-o://290e6f0fac579d0d4c90db27fdfeeb7e2402fd0f1f26528808605d51f299160c" gracePeriod=30 Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.125927 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.268230 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gs47f\" (UniqueName: \"kubernetes.io/projected/20655504-f2d9-4a76-b534-b479e7660957-kube-api-access-gs47f\") pod \"20655504-f2d9-4a76-b534-b479e7660957\" (UID: \"20655504-f2d9-4a76-b534-b479e7660957\") " Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.273925 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20655504-f2d9-4a76-b534-b479e7660957-kube-api-access-gs47f" (OuterVolumeSpecName: "kube-api-access-gs47f") pod "20655504-f2d9-4a76-b534-b479e7660957" (UID: "20655504-f2d9-4a76-b534-b479e7660957"). InnerVolumeSpecName "kube-api-access-gs47f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.371155 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gs47f\" (UniqueName: \"kubernetes.io/projected/20655504-f2d9-4a76-b534-b479e7660957-kube-api-access-gs47f\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.521710 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.522203 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="ceilometer-central-agent" containerID="cri-o://38084117f861cdc998c2f931dc631e7d8ab639eb3886f88cf2cfc3a90817cfcd" gracePeriod=30 Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.522251 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="proxy-httpd" containerID="cri-o://b492990671b55c8ce867a6b3f691143904dc92555ab4d96adfd9a539f0fbe557" gracePeriod=30 Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.522272 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="ceilometer-notification-agent" containerID="cri-o://82b288fc6f61ecc61a1a7769e2273a4d66ad0287ff91f80e44d3b0209078530e" gracePeriod=30 Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.522284 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="sg-core" containerID="cri-o://9138af20fd9ebfc66484cc8cb7577fa824beb4dc2f7342db0f10c0ccd16a1478" gracePeriod=30 Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.718222 4622 generic.go:334] "Generic (PLEG): container finished" podID="20655504-f2d9-4a76-b534-b479e7660957" containerID="290e6f0fac579d0d4c90db27fdfeeb7e2402fd0f1f26528808605d51f299160c" exitCode=2 Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.718271 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.718310 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"20655504-f2d9-4a76-b534-b479e7660957","Type":"ContainerDied","Data":"290e6f0fac579d0d4c90db27fdfeeb7e2402fd0f1f26528808605d51f299160c"} Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.718347 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"20655504-f2d9-4a76-b534-b479e7660957","Type":"ContainerDied","Data":"debfb6a58a932f5bc26787f924989c5177276e087c187122e3a6396cf8824efd"} Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.718365 4622 scope.go:117] "RemoveContainer" containerID="290e6f0fac579d0d4c90db27fdfeeb7e2402fd0f1f26528808605d51f299160c" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.721628 4622 generic.go:334] "Generic (PLEG): container finished" podID="08486003-810e-4058-a0c9-20d3d9410cc1" containerID="b492990671b55c8ce867a6b3f691143904dc92555ab4d96adfd9a539f0fbe557" exitCode=0 Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.721647 4622 generic.go:334] "Generic (PLEG): container finished" podID="08486003-810e-4058-a0c9-20d3d9410cc1" containerID="9138af20fd9ebfc66484cc8cb7577fa824beb4dc2f7342db0f10c0ccd16a1478" exitCode=2 Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.721661 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08486003-810e-4058-a0c9-20d3d9410cc1","Type":"ContainerDied","Data":"b492990671b55c8ce867a6b3f691143904dc92555ab4d96adfd9a539f0fbe557"} Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.721676 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08486003-810e-4058-a0c9-20d3d9410cc1","Type":"ContainerDied","Data":"9138af20fd9ebfc66484cc8cb7577fa824beb4dc2f7342db0f10c0ccd16a1478"} Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.743302 4622 scope.go:117] "RemoveContainer" containerID="290e6f0fac579d0d4c90db27fdfeeb7e2402fd0f1f26528808605d51f299160c" Nov 26 11:28:50 crc kubenswrapper[4622]: E1126 11:28:50.748113 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"290e6f0fac579d0d4c90db27fdfeeb7e2402fd0f1f26528808605d51f299160c\": container with ID starting with 290e6f0fac579d0d4c90db27fdfeeb7e2402fd0f1f26528808605d51f299160c not found: ID does not exist" containerID="290e6f0fac579d0d4c90db27fdfeeb7e2402fd0f1f26528808605d51f299160c" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.748190 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"290e6f0fac579d0d4c90db27fdfeeb7e2402fd0f1f26528808605d51f299160c"} err="failed to get container status \"290e6f0fac579d0d4c90db27fdfeeb7e2402fd0f1f26528808605d51f299160c\": rpc error: code = NotFound desc = could not find container \"290e6f0fac579d0d4c90db27fdfeeb7e2402fd0f1f26528808605d51f299160c\": container with ID starting with 290e6f0fac579d0d4c90db27fdfeeb7e2402fd0f1f26528808605d51f299160c not found: ID does not exist" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.757573 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.772337 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 
11:28:50.779809 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Nov 26 11:28:50 crc kubenswrapper[4622]: E1126 11:28:50.780260 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20655504-f2d9-4a76-b534-b479e7660957" containerName="kube-state-metrics" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.780280 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="20655504-f2d9-4a76-b534-b479e7660957" containerName="kube-state-metrics" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.780555 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="20655504-f2d9-4a76-b534-b479e7660957" containerName="kube-state-metrics" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.781302 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.783562 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.784252 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.786781 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.881180 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2\") " pod="openstack/kube-state-metrics-0" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.881313 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2\") " pod="openstack/kube-state-metrics-0" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.881459 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2\") " pod="openstack/kube-state-metrics-0" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.881620 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nj5h\" (UniqueName: \"kubernetes.io/projected/70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2-kube-api-access-8nj5h\") pod \"kube-state-metrics-0\" (UID: \"70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2\") " pod="openstack/kube-state-metrics-0" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.983313 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2\") " pod="openstack/kube-state-metrics-0" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.983526 4622 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2\") " pod="openstack/kube-state-metrics-0" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.983677 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nj5h\" (UniqueName: \"kubernetes.io/projected/70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2-kube-api-access-8nj5h\") pod \"kube-state-metrics-0\" (UID: \"70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2\") " pod="openstack/kube-state-metrics-0" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.983968 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2\") " pod="openstack/kube-state-metrics-0" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.988052 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2\") " pod="openstack/kube-state-metrics-0" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.988651 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2\") " pod="openstack/kube-state-metrics-0" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.989058 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2\") " pod="openstack/kube-state-metrics-0" Nov 26 11:28:50 crc kubenswrapper[4622]: I1126 11:28:50.999403 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nj5h\" (UniqueName: \"kubernetes.io/projected/70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2-kube-api-access-8nj5h\") pod \"kube-state-metrics-0\" (UID: \"70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2\") " pod="openstack/kube-state-metrics-0" Nov 26 11:28:51 crc kubenswrapper[4622]: I1126 11:28:51.096806 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 26 11:28:51 crc kubenswrapper[4622]: I1126 11:28:51.521746 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 26 11:28:51 crc kubenswrapper[4622]: I1126 11:28:51.736782 4622 generic.go:334] "Generic (PLEG): container finished" podID="08486003-810e-4058-a0c9-20d3d9410cc1" containerID="38084117f861cdc998c2f931dc631e7d8ab639eb3886f88cf2cfc3a90817cfcd" exitCode=0 Nov 26 11:28:51 crc kubenswrapper[4622]: I1126 11:28:51.736891 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08486003-810e-4058-a0c9-20d3d9410cc1","Type":"ContainerDied","Data":"38084117f861cdc998c2f931dc631e7d8ab639eb3886f88cf2cfc3a90817cfcd"} Nov 26 11:28:51 crc kubenswrapper[4622]: I1126 11:28:51.738619 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2","Type":"ContainerStarted","Data":"c842174b34d7c0bf3765056b01dc11b830f9f5980c6faf987ffea722959e1dda"} Nov 26 11:28:52 crc kubenswrapper[4622]: I1126 11:28:52.718028 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20655504-f2d9-4a76-b534-b479e7660957" path="/var/lib/kubelet/pods/20655504-f2d9-4a76-b534-b479e7660957/volumes" Nov 26 11:28:52 crc kubenswrapper[4622]: I1126 11:28:52.748403 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2","Type":"ContainerStarted","Data":"3410d31923252765a9955d02b2a0bc313dc05087207ff70548e1c63f96823efe"} Nov 26 11:28:52 crc kubenswrapper[4622]: I1126 11:28:52.749529 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Nov 26 11:28:52 crc kubenswrapper[4622]: I1126 11:28:52.769050 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.438441592 podStartE2EDuration="2.769029391s" podCreationTimestamp="2025-11-26 11:28:50 +0000 UTC" firstStartedPulling="2025-11-26 11:28:51.529608125 +0000 UTC m=+1091.120819647" lastFinishedPulling="2025-11-26 11:28:51.860195924 +0000 UTC m=+1091.451407446" observedRunningTime="2025-11-26 11:28:52.766565863 +0000 UTC m=+1092.357777395" watchObservedRunningTime="2025-11-26 11:28:52.769029391 +0000 UTC m=+1092.360240913" Nov 26 11:28:55 crc kubenswrapper[4622]: I1126 11:28:55.772308 4622 generic.go:334] "Generic (PLEG): container finished" podID="08486003-810e-4058-a0c9-20d3d9410cc1" containerID="82b288fc6f61ecc61a1a7769e2273a4d66ad0287ff91f80e44d3b0209078530e" exitCode=0 Nov 26 11:28:55 crc kubenswrapper[4622]: I1126 11:28:55.772379 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08486003-810e-4058-a0c9-20d3d9410cc1","Type":"ContainerDied","Data":"82b288fc6f61ecc61a1a7769e2273a4d66ad0287ff91f80e44d3b0209078530e"} Nov 26 11:28:55 crc kubenswrapper[4622]: I1126 11:28:55.900414 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:28:55 crc kubenswrapper[4622]: I1126 11:28:55.983947 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9b7n\" (UniqueName: \"kubernetes.io/projected/08486003-810e-4058-a0c9-20d3d9410cc1-kube-api-access-t9b7n\") pod \"08486003-810e-4058-a0c9-20d3d9410cc1\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " Nov 26 11:28:55 crc kubenswrapper[4622]: I1126 11:28:55.984001 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-config-data\") pod \"08486003-810e-4058-a0c9-20d3d9410cc1\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " Nov 26 11:28:55 crc kubenswrapper[4622]: I1126 11:28:55.984065 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08486003-810e-4058-a0c9-20d3d9410cc1-log-httpd\") pod \"08486003-810e-4058-a0c9-20d3d9410cc1\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " Nov 26 11:28:55 crc kubenswrapper[4622]: I1126 11:28:55.984093 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-sg-core-conf-yaml\") pod \"08486003-810e-4058-a0c9-20d3d9410cc1\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " Nov 26 11:28:55 crc kubenswrapper[4622]: I1126 11:28:55.984148 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-combined-ca-bundle\") pod \"08486003-810e-4058-a0c9-20d3d9410cc1\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " Nov 26 11:28:55 crc kubenswrapper[4622]: I1126 11:28:55.984183 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-scripts\") pod \"08486003-810e-4058-a0c9-20d3d9410cc1\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " Nov 26 11:28:55 crc kubenswrapper[4622]: I1126 11:28:55.984272 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08486003-810e-4058-a0c9-20d3d9410cc1-run-httpd\") pod \"08486003-810e-4058-a0c9-20d3d9410cc1\" (UID: \"08486003-810e-4058-a0c9-20d3d9410cc1\") " Nov 26 11:28:55 crc kubenswrapper[4622]: I1126 11:28:55.984791 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08486003-810e-4058-a0c9-20d3d9410cc1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "08486003-810e-4058-a0c9-20d3d9410cc1" (UID: "08486003-810e-4058-a0c9-20d3d9410cc1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:28:55 crc kubenswrapper[4622]: I1126 11:28:55.984898 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08486003-810e-4058-a0c9-20d3d9410cc1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "08486003-810e-4058-a0c9-20d3d9410cc1" (UID: "08486003-810e-4058-a0c9-20d3d9410cc1"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:28:55 crc kubenswrapper[4622]: I1126 11:28:55.984919 4622 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08486003-810e-4058-a0c9-20d3d9410cc1-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:55 crc kubenswrapper[4622]: I1126 11:28:55.989001 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08486003-810e-4058-a0c9-20d3d9410cc1-kube-api-access-t9b7n" (OuterVolumeSpecName: "kube-api-access-t9b7n") pod "08486003-810e-4058-a0c9-20d3d9410cc1" (UID: "08486003-810e-4058-a0c9-20d3d9410cc1"). InnerVolumeSpecName "kube-api-access-t9b7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:28:55 crc kubenswrapper[4622]: I1126 11:28:55.989149 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-scripts" (OuterVolumeSpecName: "scripts") pod "08486003-810e-4058-a0c9-20d3d9410cc1" (UID: "08486003-810e-4058-a0c9-20d3d9410cc1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.004427 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "08486003-810e-4058-a0c9-20d3d9410cc1" (UID: "08486003-810e-4058-a0c9-20d3d9410cc1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.031240 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "08486003-810e-4058-a0c9-20d3d9410cc1" (UID: "08486003-810e-4058-a0c9-20d3d9410cc1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.057784 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-config-data" (OuterVolumeSpecName: "config-data") pod "08486003-810e-4058-a0c9-20d3d9410cc1" (UID: "08486003-810e-4058-a0c9-20d3d9410cc1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.087714 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9b7n\" (UniqueName: \"kubernetes.io/projected/08486003-810e-4058-a0c9-20d3d9410cc1-kube-api-access-t9b7n\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.087746 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.087758 4622 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08486003-810e-4058-a0c9-20d3d9410cc1-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.087768 4622 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.087780 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.087790 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08486003-810e-4058-a0c9-20d3d9410cc1-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.781341 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08486003-810e-4058-a0c9-20d3d9410cc1","Type":"ContainerDied","Data":"19f0a2cf5645cec2a5568484746123c8e75815e7ac3bb425441719d2e83e30dc"} Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.781389 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.781400 4622 scope.go:117] "RemoveContainer" containerID="b492990671b55c8ce867a6b3f691143904dc92555ab4d96adfd9a539f0fbe557" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.800306 4622 scope.go:117] "RemoveContainer" containerID="9138af20fd9ebfc66484cc8cb7577fa824beb4dc2f7342db0f10c0ccd16a1478" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.801138 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.809489 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.823076 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:56 crc kubenswrapper[4622]: E1126 11:28:56.825600 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="ceilometer-central-agent" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.825628 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="ceilometer-central-agent" Nov 26 11:28:56 crc kubenswrapper[4622]: E1126 11:28:56.825644 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="proxy-httpd" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.825650 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="proxy-httpd" Nov 26 11:28:56 crc kubenswrapper[4622]: E1126 11:28:56.825669 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="ceilometer-notification-agent" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.825675 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="ceilometer-notification-agent" Nov 26 11:28:56 crc kubenswrapper[4622]: E1126 11:28:56.825698 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="sg-core" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.825704 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="sg-core" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.825925 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="ceilometer-notification-agent" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.825948 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="sg-core" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.825968 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="ceilometer-central-agent" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.825977 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" containerName="proxy-httpd" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.827896 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.830408 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.831157 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.831285 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.833324 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.840686 4622 scope.go:117] "RemoveContainer" containerID="82b288fc6f61ecc61a1a7769e2273a4d66ad0287ff91f80e44d3b0209078530e" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.858376 4622 scope.go:117] "RemoveContainer" containerID="38084117f861cdc998c2f931dc631e7d8ab639eb3886f88cf2cfc3a90817cfcd" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.988052 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.988279 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.988645 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.988673 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.990923 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 26 11:28:56 crc kubenswrapper[4622]: I1126 11:28:56.990960 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.003419 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-scripts\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.003526 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqgtl\" (UniqueName: \"kubernetes.io/projected/2ba985ba-4069-4084-9893-570b5f368f9d-kube-api-access-tqgtl\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.003555 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.003582 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 
26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.003662 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-config-data\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.003716 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ba985ba-4069-4084-9893-570b5f368f9d-run-httpd\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.003757 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ba985ba-4069-4084-9893-570b5f368f9d-log-httpd\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.003943 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.106197 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.106966 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-scripts\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.107142 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqgtl\" (UniqueName: \"kubernetes.io/projected/2ba985ba-4069-4084-9893-570b5f368f9d-kube-api-access-tqgtl\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.107253 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.107346 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.107600 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-config-data\") pod \"ceilometer-0\" (UID: 
\"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.107689 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ba985ba-4069-4084-9893-570b5f368f9d-run-httpd\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.107778 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ba985ba-4069-4084-9893-570b5f368f9d-log-httpd\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.108607 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ba985ba-4069-4084-9893-570b5f368f9d-run-httpd\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.108881 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ba985ba-4069-4084-9893-570b5f368f9d-log-httpd\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.115914 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.116826 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-config-data\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.117841 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.120362 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.124795 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-scripts\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.125475 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqgtl\" (UniqueName: \"kubernetes.io/projected/2ba985ba-4069-4084-9893-570b5f368f9d-kube-api-access-tqgtl\") pod \"ceilometer-0\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") " pod="openstack/ceilometer-0" Nov 26 
11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.144149 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-c9b558957-lc98m"] Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.144614 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.145680 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.162251 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c9b558957-lc98m"] Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.312234 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-ovsdbserver-nb\") pod \"dnsmasq-dns-c9b558957-lc98m\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.312764 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-ovsdbserver-sb\") pod \"dnsmasq-dns-c9b558957-lc98m\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.312791 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xgjp\" (UniqueName: \"kubernetes.io/projected/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-kube-api-access-7xgjp\") pod \"dnsmasq-dns-c9b558957-lc98m\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.312829 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-dns-svc\") pod \"dnsmasq-dns-c9b558957-lc98m\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.312872 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-config\") pod \"dnsmasq-dns-c9b558957-lc98m\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.415029 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-ovsdbserver-sb\") pod \"dnsmasq-dns-c9b558957-lc98m\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.415070 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xgjp\" (UniqueName: \"kubernetes.io/projected/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-kube-api-access-7xgjp\") pod \"dnsmasq-dns-c9b558957-lc98m\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.415099 4622 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-dns-svc\") pod \"dnsmasq-dns-c9b558957-lc98m\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.415137 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-config\") pod \"dnsmasq-dns-c9b558957-lc98m\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.415225 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-ovsdbserver-nb\") pod \"dnsmasq-dns-c9b558957-lc98m\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.416194 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-ovsdbserver-nb\") pod \"dnsmasq-dns-c9b558957-lc98m\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.416757 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-ovsdbserver-sb\") pod \"dnsmasq-dns-c9b558957-lc98m\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.419119 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-config\") pod \"dnsmasq-dns-c9b558957-lc98m\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.420271 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-dns-svc\") pod \"dnsmasq-dns-c9b558957-lc98m\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.431928 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xgjp\" (UniqueName: \"kubernetes.io/projected/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-kube-api-access-7xgjp\") pod \"dnsmasq-dns-c9b558957-lc98m\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.524904 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.569840 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.794730 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2ba985ba-4069-4084-9893-570b5f368f9d","Type":"ContainerStarted","Data":"15393f63233c04bb8917976eeb91f2b1bdd25696677121d5100e22edec243b8b"} Nov 26 11:28:57 crc kubenswrapper[4622]: I1126 11:28:57.936592 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c9b558957-lc98m"] Nov 26 11:28:58 crc kubenswrapper[4622]: I1126 11:28:58.539694 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:28:58 crc kubenswrapper[4622]: I1126 11:28:58.730457 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08486003-810e-4058-a0c9-20d3d9410cc1" path="/var/lib/kubelet/pods/08486003-810e-4058-a0c9-20d3d9410cc1/volumes" Nov 26 11:28:58 crc kubenswrapper[4622]: I1126 11:28:58.804844 4622 generic.go:334] "Generic (PLEG): container finished" podID="1527e673-93b6-4f9a-aa73-df2b8e1f4cdb" containerID="f5382a88da8456425d308915bd1014b7af35be6afeb261c548cead5553a5fa2b" exitCode=0 Nov 26 11:28:58 crc kubenswrapper[4622]: I1126 11:28:58.804951 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c9b558957-lc98m" event={"ID":"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb","Type":"ContainerDied","Data":"f5382a88da8456425d308915bd1014b7af35be6afeb261c548cead5553a5fa2b"} Nov 26 11:28:58 crc kubenswrapper[4622]: I1126 11:28:58.805012 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c9b558957-lc98m" event={"ID":"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb","Type":"ContainerStarted","Data":"e8d741ceb21b4aba3a67b66a993e5a874dbc176a63b450d52dd830c999d54a2b"} Nov 26 11:28:58 crc kubenswrapper[4622]: I1126 11:28:58.808075 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2ba985ba-4069-4084-9893-570b5f368f9d","Type":"ContainerStarted","Data":"fdb5457cc034539894799e111a0d81fe091a441a3da67a06639a081329eacded"} Nov 26 11:28:59 crc kubenswrapper[4622]: E1126 11:28:59.689224 4622 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf818b7cc_2b84_4bff_8c7e_e51a240179df.slice/crio-35e7aa4109463f397bed20869781e2958b160f7927640379ce4a2e49129deca7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf818b7cc_2b84_4bff_8c7e_e51a240179df.slice/crio-conmon-35e7aa4109463f397bed20869781e2958b160f7927640379ce4a2e49129deca7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode920ae2c_a5a6_46e8_aca7_086abee6cf08.slice/crio-9a82fefb9ed634671cd159e591bbd723e72676ff703ab6f5d0d6c8cafbe09b2e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode920ae2c_a5a6_46e8_aca7_086abee6cf08.slice/crio-conmon-9a82fefb9ed634671cd159e591bbd723e72676ff703ab6f5d0d6c8cafbe09b2e.scope\": RecentStats: unable to find data in memory cache]" Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.803798 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] 
Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.825799 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2ba985ba-4069-4084-9893-570b5f368f9d","Type":"ContainerStarted","Data":"384f371803367002f603ed8702c4a46581d03034afe293b88a7e035542329c5e"} Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.828095 4622 generic.go:334] "Generic (PLEG): container finished" podID="e920ae2c-a5a6-46e8-aca7-086abee6cf08" containerID="9a82fefb9ed634671cd159e591bbd723e72676ff703ab6f5d0d6c8cafbe09b2e" exitCode=137 Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.828233 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e920ae2c-a5a6-46e8-aca7-086abee6cf08","Type":"ContainerDied","Data":"9a82fefb9ed634671cd159e591bbd723e72676ff703ab6f5d0d6c8cafbe09b2e"} Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.829883 4622 generic.go:334] "Generic (PLEG): container finished" podID="f818b7cc-2b84-4bff-8c7e-e51a240179df" containerID="35e7aa4109463f397bed20869781e2958b160f7927640379ce4a2e49129deca7" exitCode=137 Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.830002 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f818b7cc-2b84-4bff-8c7e-e51a240179df","Type":"ContainerDied","Data":"35e7aa4109463f397bed20869781e2958b160f7927640379ce4a2e49129deca7"} Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.830069 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f818b7cc-2b84-4bff-8c7e-e51a240179df","Type":"ContainerDied","Data":"3cb316aed68d7d8f69bbda2a15004f136e174f884f28aa3ad88782d47a94b0aa"} Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.830137 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3cb316aed68d7d8f69bbda2a15004f136e174f884f28aa3ad88782d47a94b0aa" Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.832622 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c9b558957-lc98m" event={"ID":"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb","Type":"ContainerStarted","Data":"26bd97411f6ed7055a6a61ab6fbbe3c8a8d0d3dc865dfde127e4f9fe1b5c2eea"} Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.832795 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="62c44e56-3ca0-48fb-8f03-235fee59fa3d" containerName="nova-api-log" containerID="cri-o://125086f1fc78a7638449f1a7857998f588221cbc1845e567094c03c8b339c198" gracePeriod=30 Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.833326 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="62c44e56-3ca0-48fb-8f03-235fee59fa3d" containerName="nova-api-api" containerID="cri-o://35b83b184e4f5a9eb935aa0cb478caf407e40d0cd0996f19e19010f0c7476159" gracePeriod=30 Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.920092 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.921927 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.955306 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-c9b558957-lc98m" podStartSLOduration=2.955280812 podStartE2EDuration="2.955280812s" podCreationTimestamp="2025-11-26 11:28:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:28:59.866832954 +0000 UTC m=+1099.458044476" watchObservedRunningTime="2025-11-26 11:28:59.955280812 +0000 UTC m=+1099.546492334" Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.970627 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e920ae2c-a5a6-46e8-aca7-086abee6cf08-config-data\") pod \"e920ae2c-a5a6-46e8-aca7-086abee6cf08\" (UID: \"e920ae2c-a5a6-46e8-aca7-086abee6cf08\") " Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.970698 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f818b7cc-2b84-4bff-8c7e-e51a240179df-config-data\") pod \"f818b7cc-2b84-4bff-8c7e-e51a240179df\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.970741 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9kq8h\" (UniqueName: \"kubernetes.io/projected/e920ae2c-a5a6-46e8-aca7-086abee6cf08-kube-api-access-9kq8h\") pod \"e920ae2c-a5a6-46e8-aca7-086abee6cf08\" (UID: \"e920ae2c-a5a6-46e8-aca7-086abee6cf08\") " Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.970771 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e920ae2c-a5a6-46e8-aca7-086abee6cf08-combined-ca-bundle\") pod \"e920ae2c-a5a6-46e8-aca7-086abee6cf08\" (UID: \"e920ae2c-a5a6-46e8-aca7-086abee6cf08\") " Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.970807 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzl2d\" (UniqueName: \"kubernetes.io/projected/f818b7cc-2b84-4bff-8c7e-e51a240179df-kube-api-access-lzl2d\") pod \"f818b7cc-2b84-4bff-8c7e-e51a240179df\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.970844 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f818b7cc-2b84-4bff-8c7e-e51a240179df-combined-ca-bundle\") pod \"f818b7cc-2b84-4bff-8c7e-e51a240179df\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.971015 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f818b7cc-2b84-4bff-8c7e-e51a240179df-logs\") pod \"f818b7cc-2b84-4bff-8c7e-e51a240179df\" (UID: \"f818b7cc-2b84-4bff-8c7e-e51a240179df\") " Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.971807 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f818b7cc-2b84-4bff-8c7e-e51a240179df-logs" (OuterVolumeSpecName: "logs") pod "f818b7cc-2b84-4bff-8c7e-e51a240179df" (UID: "f818b7cc-2b84-4bff-8c7e-e51a240179df"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.986977 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f818b7cc-2b84-4bff-8c7e-e51a240179df-kube-api-access-lzl2d" (OuterVolumeSpecName: "kube-api-access-lzl2d") pod "f818b7cc-2b84-4bff-8c7e-e51a240179df" (UID: "f818b7cc-2b84-4bff-8c7e-e51a240179df"). InnerVolumeSpecName "kube-api-access-lzl2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:28:59 crc kubenswrapper[4622]: I1126 11:28:59.987077 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e920ae2c-a5a6-46e8-aca7-086abee6cf08-kube-api-access-9kq8h" (OuterVolumeSpecName: "kube-api-access-9kq8h") pod "e920ae2c-a5a6-46e8-aca7-086abee6cf08" (UID: "e920ae2c-a5a6-46e8-aca7-086abee6cf08"). InnerVolumeSpecName "kube-api-access-9kq8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.003765 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f818b7cc-2b84-4bff-8c7e-e51a240179df-config-data" (OuterVolumeSpecName: "config-data") pod "f818b7cc-2b84-4bff-8c7e-e51a240179df" (UID: "f818b7cc-2b84-4bff-8c7e-e51a240179df"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.008371 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e920ae2c-a5a6-46e8-aca7-086abee6cf08-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e920ae2c-a5a6-46e8-aca7-086abee6cf08" (UID: "e920ae2c-a5a6-46e8-aca7-086abee6cf08"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.019442 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e920ae2c-a5a6-46e8-aca7-086abee6cf08-config-data" (OuterVolumeSpecName: "config-data") pod "e920ae2c-a5a6-46e8-aca7-086abee6cf08" (UID: "e920ae2c-a5a6-46e8-aca7-086abee6cf08"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.026935 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f818b7cc-2b84-4bff-8c7e-e51a240179df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f818b7cc-2b84-4bff-8c7e-e51a240179df" (UID: "f818b7cc-2b84-4bff-8c7e-e51a240179df"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.073630 4622 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f818b7cc-2b84-4bff-8c7e-e51a240179df-logs\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.073679 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e920ae2c-a5a6-46e8-aca7-086abee6cf08-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.073697 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f818b7cc-2b84-4bff-8c7e-e51a240179df-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.073707 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9kq8h\" (UniqueName: \"kubernetes.io/projected/e920ae2c-a5a6-46e8-aca7-086abee6cf08-kube-api-access-9kq8h\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.073721 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e920ae2c-a5a6-46e8-aca7-086abee6cf08-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.073730 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzl2d\" (UniqueName: \"kubernetes.io/projected/f818b7cc-2b84-4bff-8c7e-e51a240179df-kube-api-access-lzl2d\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.073739 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f818b7cc-2b84-4bff-8c7e-e51a240179df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.847877 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2ba985ba-4069-4084-9893-570b5f368f9d","Type":"ContainerStarted","Data":"62214bfcc418a79e6a0072c2c680cdbad54b7f5c7a1e59f4d7b0bed9929b951f"} Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.850912 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e920ae2c-a5a6-46e8-aca7-086abee6cf08","Type":"ContainerDied","Data":"0a2ff897083af7f8b699cb8dd81f66902a0094d572dbcccbf5fbe97c0d746f49"} Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.850960 4622 scope.go:117] "RemoveContainer" containerID="9a82fefb9ed634671cd159e591bbd723e72676ff703ab6f5d0d6c8cafbe09b2e" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.850967 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.854868 4622 generic.go:334] "Generic (PLEG): container finished" podID="62c44e56-3ca0-48fb-8f03-235fee59fa3d" containerID="125086f1fc78a7638449f1a7857998f588221cbc1845e567094c03c8b339c198" exitCode=143 Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.855081 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.855695 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"62c44e56-3ca0-48fb-8f03-235fee59fa3d","Type":"ContainerDied","Data":"125086f1fc78a7638449f1a7857998f588221cbc1845e567094c03c8b339c198"} Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.855884 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.873561 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.880619 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.898537 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.902103 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.914964 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 26 11:29:00 crc kubenswrapper[4622]: E1126 11:29:00.915399 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f818b7cc-2b84-4bff-8c7e-e51a240179df" containerName="nova-metadata-metadata" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.915414 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f818b7cc-2b84-4bff-8c7e-e51a240179df" containerName="nova-metadata-metadata" Nov 26 11:29:00 crc kubenswrapper[4622]: E1126 11:29:00.915434 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e920ae2c-a5a6-46e8-aca7-086abee6cf08" containerName="nova-cell1-novncproxy-novncproxy" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.915441 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="e920ae2c-a5a6-46e8-aca7-086abee6cf08" containerName="nova-cell1-novncproxy-novncproxy" Nov 26 11:29:00 crc kubenswrapper[4622]: E1126 11:29:00.915454 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f818b7cc-2b84-4bff-8c7e-e51a240179df" containerName="nova-metadata-log" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.915460 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f818b7cc-2b84-4bff-8c7e-e51a240179df" containerName="nova-metadata-log" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.915656 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="e920ae2c-a5a6-46e8-aca7-086abee6cf08" containerName="nova-cell1-novncproxy-novncproxy" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.915674 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="f818b7cc-2b84-4bff-8c7e-e51a240179df" containerName="nova-metadata-metadata" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.915683 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="f818b7cc-2b84-4bff-8c7e-e51a240179df" containerName="nova-metadata-log" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.916276 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.918978 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.919125 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.919288 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.938966 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.941068 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.944431 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.944585 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.945185 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.957992 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.989743 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-logs\") pod \"nova-metadata-0\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " pod="openstack/nova-metadata-0" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.989894 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92ltf\" (UniqueName: \"kubernetes.io/projected/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-kube-api-access-92ltf\") pod \"nova-metadata-0\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " pod="openstack/nova-metadata-0" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.990064 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " pod="openstack/nova-metadata-0" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.990179 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6afcd63d-fe69-405b-bb79-4dfa143d4651-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6afcd63d-fe69-405b-bb79-4dfa143d4651\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.990279 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afcd63d-fe69-405b-bb79-4dfa143d4651-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6afcd63d-fe69-405b-bb79-4dfa143d4651\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:00 crc 
kubenswrapper[4622]: I1126 11:29:00.990403 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afcd63d-fe69-405b-bb79-4dfa143d4651-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6afcd63d-fe69-405b-bb79-4dfa143d4651\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.990541 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krp86\" (UniqueName: \"kubernetes.io/projected/6afcd63d-fe69-405b-bb79-4dfa143d4651-kube-api-access-krp86\") pod \"nova-cell1-novncproxy-0\" (UID: \"6afcd63d-fe69-405b-bb79-4dfa143d4651\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.990719 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " pod="openstack/nova-metadata-0" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.990844 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6afcd63d-fe69-405b-bb79-4dfa143d4651-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6afcd63d-fe69-405b-bb79-4dfa143d4651\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:00 crc kubenswrapper[4622]: I1126 11:29:00.990935 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-config-data\") pod \"nova-metadata-0\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " pod="openstack/nova-metadata-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.093153 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afcd63d-fe69-405b-bb79-4dfa143d4651-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6afcd63d-fe69-405b-bb79-4dfa143d4651\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.093215 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afcd63d-fe69-405b-bb79-4dfa143d4651-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6afcd63d-fe69-405b-bb79-4dfa143d4651\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.093254 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krp86\" (UniqueName: \"kubernetes.io/projected/6afcd63d-fe69-405b-bb79-4dfa143d4651-kube-api-access-krp86\") pod \"nova-cell1-novncproxy-0\" (UID: \"6afcd63d-fe69-405b-bb79-4dfa143d4651\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.093316 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " pod="openstack/nova-metadata-0" Nov 26 11:29:01 crc 
kubenswrapper[4622]: I1126 11:29:01.093358 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6afcd63d-fe69-405b-bb79-4dfa143d4651-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6afcd63d-fe69-405b-bb79-4dfa143d4651\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.093385 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-config-data\") pod \"nova-metadata-0\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " pod="openstack/nova-metadata-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.093422 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-logs\") pod \"nova-metadata-0\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " pod="openstack/nova-metadata-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.093441 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92ltf\" (UniqueName: \"kubernetes.io/projected/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-kube-api-access-92ltf\") pod \"nova-metadata-0\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " pod="openstack/nova-metadata-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.093482 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " pod="openstack/nova-metadata-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.093529 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6afcd63d-fe69-405b-bb79-4dfa143d4651-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6afcd63d-fe69-405b-bb79-4dfa143d4651\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.094350 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-logs\") pod \"nova-metadata-0\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " pod="openstack/nova-metadata-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.098179 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afcd63d-fe69-405b-bb79-4dfa143d4651-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6afcd63d-fe69-405b-bb79-4dfa143d4651\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.102669 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6afcd63d-fe69-405b-bb79-4dfa143d4651-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6afcd63d-fe69-405b-bb79-4dfa143d4651\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.103051 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6afcd63d-fe69-405b-bb79-4dfa143d4651-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"6afcd63d-fe69-405b-bb79-4dfa143d4651\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.105348 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " pod="openstack/nova-metadata-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.109021 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92ltf\" (UniqueName: \"kubernetes.io/projected/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-kube-api-access-92ltf\") pod \"nova-metadata-0\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " pod="openstack/nova-metadata-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.110749 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krp86\" (UniqueName: \"kubernetes.io/projected/6afcd63d-fe69-405b-bb79-4dfa143d4651-kube-api-access-krp86\") pod \"nova-cell1-novncproxy-0\" (UID: \"6afcd63d-fe69-405b-bb79-4dfa143d4651\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.112845 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afcd63d-fe69-405b-bb79-4dfa143d4651-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6afcd63d-fe69-405b-bb79-4dfa143d4651\") " pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.113435 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " pod="openstack/nova-metadata-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.114019 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-config-data\") pod \"nova-metadata-0\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " pod="openstack/nova-metadata-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.118028 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.236417 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.254732 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 26 11:29:01 crc kubenswrapper[4622]: W1126 11:29:01.659378 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6afcd63d_fe69_405b_bb79_4dfa143d4651.slice/crio-a1bf588c734e82b39d7e1472c1b48b2215de9ed8713d93d9fdc1728bb61fa2ac WatchSource:0}: Error finding container a1bf588c734e82b39d7e1472c1b48b2215de9ed8713d93d9fdc1728bb61fa2ac: Status 404 returned error can't find the container with id a1bf588c734e82b39d7e1472c1b48b2215de9ed8713d93d9fdc1728bb61fa2ac Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.661264 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.762809 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.866122 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603","Type":"ContainerStarted","Data":"b299bf3ac6c0864941a9926d065f2d8504f065ab336e701ea3e1f9114b759fb2"} Nov 26 11:29:01 crc kubenswrapper[4622]: I1126 11:29:01.870574 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6afcd63d-fe69-405b-bb79-4dfa143d4651","Type":"ContainerStarted","Data":"a1bf588c734e82b39d7e1472c1b48b2215de9ed8713d93d9fdc1728bb61fa2ac"} Nov 26 11:29:02 crc kubenswrapper[4622]: I1126 11:29:02.717715 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e920ae2c-a5a6-46e8-aca7-086abee6cf08" path="/var/lib/kubelet/pods/e920ae2c-a5a6-46e8-aca7-086abee6cf08/volumes" Nov 26 11:29:02 crc kubenswrapper[4622]: I1126 11:29:02.718639 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f818b7cc-2b84-4bff-8c7e-e51a240179df" path="/var/lib/kubelet/pods/f818b7cc-2b84-4bff-8c7e-e51a240179df/volumes" Nov 26 11:29:02 crc kubenswrapper[4622]: I1126 11:29:02.885001 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2ba985ba-4069-4084-9893-570b5f368f9d","Type":"ContainerStarted","Data":"e4998ae83ea42a2c3032a8c6728541f47d36786eaa4aff9ecda539fe7833475a"} Nov 26 11:29:02 crc kubenswrapper[4622]: I1126 11:29:02.885210 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="ceilometer-central-agent" containerID="cri-o://fdb5457cc034539894799e111a0d81fe091a441a3da67a06639a081329eacded" gracePeriod=30 Nov 26 11:29:02 crc kubenswrapper[4622]: I1126 11:29:02.885567 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 26 11:29:02 crc kubenswrapper[4622]: I1126 11:29:02.885764 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="ceilometer-notification-agent" containerID="cri-o://384f371803367002f603ed8702c4a46581d03034afe293b88a7e035542329c5e" gracePeriod=30 Nov 26 11:29:02 crc kubenswrapper[4622]: I1126 11:29:02.885793 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="proxy-httpd" containerID="cri-o://e4998ae83ea42a2c3032a8c6728541f47d36786eaa4aff9ecda539fe7833475a" gracePeriod=30 Nov 26 
11:29:02 crc kubenswrapper[4622]: I1126 11:29:02.885839 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="sg-core" containerID="cri-o://62214bfcc418a79e6a0072c2c680cdbad54b7f5c7a1e59f4d7b0bed9929b951f" gracePeriod=30 Nov 26 11:29:02 crc kubenswrapper[4622]: I1126 11:29:02.890274 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603","Type":"ContainerStarted","Data":"7041ca0552da59d13f47875f7b4ec29849434631880f98dee3e312f98d156978"} Nov 26 11:29:02 crc kubenswrapper[4622]: I1126 11:29:02.890325 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603","Type":"ContainerStarted","Data":"11d69da12cf59f6864db6dcedde0cc628082044bdcd91cc7b83a3e25ebacb47e"} Nov 26 11:29:02 crc kubenswrapper[4622]: I1126 11:29:02.892891 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6afcd63d-fe69-405b-bb79-4dfa143d4651","Type":"ContainerStarted","Data":"5ebb1097e07e51e677e9408e1e5b3c3d9e6b77e19fb64d5769157f974c84888a"} Nov 26 11:29:02 crc kubenswrapper[4622]: I1126 11:29:02.904878 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.808521245 podStartE2EDuration="6.904857025s" podCreationTimestamp="2025-11-26 11:28:56 +0000 UTC" firstStartedPulling="2025-11-26 11:28:57.586353784 +0000 UTC m=+1097.177565306" lastFinishedPulling="2025-11-26 11:29:01.682689564 +0000 UTC m=+1101.273901086" observedRunningTime="2025-11-26 11:29:02.900815608 +0000 UTC m=+1102.492027131" watchObservedRunningTime="2025-11-26 11:29:02.904857025 +0000 UTC m=+1102.496068546" Nov 26 11:29:02 crc kubenswrapper[4622]: I1126 11:29:02.928898 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.928881704 podStartE2EDuration="2.928881704s" podCreationTimestamp="2025-11-26 11:29:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:29:02.924706517 +0000 UTC m=+1102.515918038" watchObservedRunningTime="2025-11-26 11:29:02.928881704 +0000 UTC m=+1102.520093226" Nov 26 11:29:02 crc kubenswrapper[4622]: I1126 11:29:02.950106 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.950088429 podStartE2EDuration="2.950088429s" podCreationTimestamp="2025-11-26 11:29:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:29:02.943462397 +0000 UTC m=+1102.534673919" watchObservedRunningTime="2025-11-26 11:29:02.950088429 +0000 UTC m=+1102.541299951" Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.314133 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.445292 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c44e56-3ca0-48fb-8f03-235fee59fa3d-combined-ca-bundle\") pod \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.445386 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62c44e56-3ca0-48fb-8f03-235fee59fa3d-config-data\") pod \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.445736 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62c44e56-3ca0-48fb-8f03-235fee59fa3d-logs\") pod \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.445782 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svgg8\" (UniqueName: \"kubernetes.io/projected/62c44e56-3ca0-48fb-8f03-235fee59fa3d-kube-api-access-svgg8\") pod \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\" (UID: \"62c44e56-3ca0-48fb-8f03-235fee59fa3d\") " Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.446720 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62c44e56-3ca0-48fb-8f03-235fee59fa3d-logs" (OuterVolumeSpecName: "logs") pod "62c44e56-3ca0-48fb-8f03-235fee59fa3d" (UID: "62c44e56-3ca0-48fb-8f03-235fee59fa3d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.448493 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62c44e56-3ca0-48fb-8f03-235fee59fa3d-kube-api-access-svgg8" (OuterVolumeSpecName: "kube-api-access-svgg8") pod "62c44e56-3ca0-48fb-8f03-235fee59fa3d" (UID: "62c44e56-3ca0-48fb-8f03-235fee59fa3d"). InnerVolumeSpecName "kube-api-access-svgg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.464147 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c44e56-3ca0-48fb-8f03-235fee59fa3d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "62c44e56-3ca0-48fb-8f03-235fee59fa3d" (UID: "62c44e56-3ca0-48fb-8f03-235fee59fa3d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.465293 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c44e56-3ca0-48fb-8f03-235fee59fa3d-config-data" (OuterVolumeSpecName: "config-data") pod "62c44e56-3ca0-48fb-8f03-235fee59fa3d" (UID: "62c44e56-3ca0-48fb-8f03-235fee59fa3d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.548117 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c44e56-3ca0-48fb-8f03-235fee59fa3d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.548252 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62c44e56-3ca0-48fb-8f03-235fee59fa3d-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.548267 4622 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62c44e56-3ca0-48fb-8f03-235fee59fa3d-logs\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.548276 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svgg8\" (UniqueName: \"kubernetes.io/projected/62c44e56-3ca0-48fb-8f03-235fee59fa3d-kube-api-access-svgg8\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.906048 4622 generic.go:334] "Generic (PLEG): container finished" podID="62c44e56-3ca0-48fb-8f03-235fee59fa3d" containerID="35b83b184e4f5a9eb935aa0cb478caf407e40d0cd0996f19e19010f0c7476159" exitCode=0 Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.906238 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"62c44e56-3ca0-48fb-8f03-235fee59fa3d","Type":"ContainerDied","Data":"35b83b184e4f5a9eb935aa0cb478caf407e40d0cd0996f19e19010f0c7476159"} Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.906336 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"62c44e56-3ca0-48fb-8f03-235fee59fa3d","Type":"ContainerDied","Data":"f64712c808e60a9ca971d2b46ed1e079b0652e6d3b924be878644e13e26fc2b7"} Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.906370 4622 scope.go:117] "RemoveContainer" containerID="35b83b184e4f5a9eb935aa0cb478caf407e40d0cd0996f19e19010f0c7476159" Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.906406 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.911787 4622 generic.go:334] "Generic (PLEG): container finished" podID="2ba985ba-4069-4084-9893-570b5f368f9d" containerID="e4998ae83ea42a2c3032a8c6728541f47d36786eaa4aff9ecda539fe7833475a" exitCode=0 Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.911911 4622 generic.go:334] "Generic (PLEG): container finished" podID="2ba985ba-4069-4084-9893-570b5f368f9d" containerID="62214bfcc418a79e6a0072c2c680cdbad54b7f5c7a1e59f4d7b0bed9929b951f" exitCode=2 Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.911982 4622 generic.go:334] "Generic (PLEG): container finished" podID="2ba985ba-4069-4084-9893-570b5f368f9d" containerID="384f371803367002f603ed8702c4a46581d03034afe293b88a7e035542329c5e" exitCode=0 Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.912195 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2ba985ba-4069-4084-9893-570b5f368f9d","Type":"ContainerDied","Data":"e4998ae83ea42a2c3032a8c6728541f47d36786eaa4aff9ecda539fe7833475a"} Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.912228 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2ba985ba-4069-4084-9893-570b5f368f9d","Type":"ContainerDied","Data":"62214bfcc418a79e6a0072c2c680cdbad54b7f5c7a1e59f4d7b0bed9929b951f"} Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.912239 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2ba985ba-4069-4084-9893-570b5f368f9d","Type":"ContainerDied","Data":"384f371803367002f603ed8702c4a46581d03034afe293b88a7e035542329c5e"} Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.970060 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.972107 4622 scope.go:117] "RemoveContainer" containerID="125086f1fc78a7638449f1a7857998f588221cbc1845e567094c03c8b339c198" Nov 26 11:29:03 crc kubenswrapper[4622]: I1126 11:29:03.992777 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.001157 4622 scope.go:117] "RemoveContainer" containerID="35b83b184e4f5a9eb935aa0cb478caf407e40d0cd0996f19e19010f0c7476159" Nov 26 11:29:04 crc kubenswrapper[4622]: E1126 11:29:04.001883 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35b83b184e4f5a9eb935aa0cb478caf407e40d0cd0996f19e19010f0c7476159\": container with ID starting with 35b83b184e4f5a9eb935aa0cb478caf407e40d0cd0996f19e19010f0c7476159 not found: ID does not exist" containerID="35b83b184e4f5a9eb935aa0cb478caf407e40d0cd0996f19e19010f0c7476159" Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.001920 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35b83b184e4f5a9eb935aa0cb478caf407e40d0cd0996f19e19010f0c7476159"} err="failed to get container status \"35b83b184e4f5a9eb935aa0cb478caf407e40d0cd0996f19e19010f0c7476159\": rpc error: code = NotFound desc = could not find container \"35b83b184e4f5a9eb935aa0cb478caf407e40d0cd0996f19e19010f0c7476159\": container with ID starting with 35b83b184e4f5a9eb935aa0cb478caf407e40d0cd0996f19e19010f0c7476159 not found: ID does not exist" Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.001944 4622 scope.go:117] "RemoveContainer" 
containerID="125086f1fc78a7638449f1a7857998f588221cbc1845e567094c03c8b339c198" Nov 26 11:29:04 crc kubenswrapper[4622]: E1126 11:29:04.002214 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"125086f1fc78a7638449f1a7857998f588221cbc1845e567094c03c8b339c198\": container with ID starting with 125086f1fc78a7638449f1a7857998f588221cbc1845e567094c03c8b339c198 not found: ID does not exist" containerID="125086f1fc78a7638449f1a7857998f588221cbc1845e567094c03c8b339c198" Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.002236 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"125086f1fc78a7638449f1a7857998f588221cbc1845e567094c03c8b339c198"} err="failed to get container status \"125086f1fc78a7638449f1a7857998f588221cbc1845e567094c03c8b339c198\": rpc error: code = NotFound desc = could not find container \"125086f1fc78a7638449f1a7857998f588221cbc1845e567094c03c8b339c198\": container with ID starting with 125086f1fc78a7638449f1a7857998f588221cbc1845e567094c03c8b339c198 not found: ID does not exist" Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.007851 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 26 11:29:04 crc kubenswrapper[4622]: E1126 11:29:04.008280 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62c44e56-3ca0-48fb-8f03-235fee59fa3d" containerName="nova-api-api" Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.008299 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="62c44e56-3ca0-48fb-8f03-235fee59fa3d" containerName="nova-api-api" Nov 26 11:29:04 crc kubenswrapper[4622]: E1126 11:29:04.008318 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62c44e56-3ca0-48fb-8f03-235fee59fa3d" containerName="nova-api-log" Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.008324 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="62c44e56-3ca0-48fb-8f03-235fee59fa3d" containerName="nova-api-log" Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.008538 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="62c44e56-3ca0-48fb-8f03-235fee59fa3d" containerName="nova-api-api" Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.008566 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="62c44e56-3ca0-48fb-8f03-235fee59fa3d" containerName="nova-api-log" Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.009721 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.012091 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.012139 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.012109 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.015411 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.061485 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.061573 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-config-data\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.061716 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2723fe0a-e74f-4934-8574-0b958246ee9f-logs\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.061742 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-public-tls-certs\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.061952 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwmpx\" (UniqueName: \"kubernetes.io/projected/2723fe0a-e74f-4934-8574-0b958246ee9f-kube-api-access-vwmpx\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.062048 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.163928 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2723fe0a-e74f-4934-8574-0b958246ee9f-logs\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.163970 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-public-tls-certs\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.164043 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwmpx\" (UniqueName: \"kubernetes.io/projected/2723fe0a-e74f-4934-8574-0b958246ee9f-kube-api-access-vwmpx\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.164087 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.164111 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.164132 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-config-data\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.165090 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2723fe0a-e74f-4934-8574-0b958246ee9f-logs\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.168474 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-public-tls-certs\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.168788 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-config-data\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.169161 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.171956 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.178100 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwmpx\" (UniqueName: \"kubernetes.io/projected/2723fe0a-e74f-4934-8574-0b958246ee9f-kube-api-access-vwmpx\") pod \"nova-api-0\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.234255 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.265598 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ba985ba-4069-4084-9893-570b5f368f9d-log-httpd\") pod \"2ba985ba-4069-4084-9893-570b5f368f9d\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") "
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.265676 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ba985ba-4069-4084-9893-570b5f368f9d-run-httpd\") pod \"2ba985ba-4069-4084-9893-570b5f368f9d\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") "
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.265727 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-ceilometer-tls-certs\") pod \"2ba985ba-4069-4084-9893-570b5f368f9d\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") "
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.265906 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqgtl\" (UniqueName: \"kubernetes.io/projected/2ba985ba-4069-4084-9893-570b5f368f9d-kube-api-access-tqgtl\") pod \"2ba985ba-4069-4084-9893-570b5f368f9d\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") "
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.265932 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-scripts\") pod \"2ba985ba-4069-4084-9893-570b5f368f9d\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") "
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.265979 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-sg-core-conf-yaml\") pod \"2ba985ba-4069-4084-9893-570b5f368f9d\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") "
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.266002 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-combined-ca-bundle\") pod \"2ba985ba-4069-4084-9893-570b5f368f9d\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") "
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.266039 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-config-data\") pod \"2ba985ba-4069-4084-9893-570b5f368f9d\" (UID: \"2ba985ba-4069-4084-9893-570b5f368f9d\") "
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.266031 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ba985ba-4069-4084-9893-570b5f368f9d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2ba985ba-4069-4084-9893-570b5f368f9d" (UID: "2ba985ba-4069-4084-9893-570b5f368f9d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.266076 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ba985ba-4069-4084-9893-570b5f368f9d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2ba985ba-4069-4084-9893-570b5f368f9d" (UID: "2ba985ba-4069-4084-9893-570b5f368f9d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.266720 4622 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ba985ba-4069-4084-9893-570b5f368f9d-log-httpd\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.266738 4622 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ba985ba-4069-4084-9893-570b5f368f9d-run-httpd\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.269241 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ba985ba-4069-4084-9893-570b5f368f9d-kube-api-access-tqgtl" (OuterVolumeSpecName: "kube-api-access-tqgtl") pod "2ba985ba-4069-4084-9893-570b5f368f9d" (UID: "2ba985ba-4069-4084-9893-570b5f368f9d"). InnerVolumeSpecName "kube-api-access-tqgtl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.269981 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-scripts" (OuterVolumeSpecName: "scripts") pod "2ba985ba-4069-4084-9893-570b5f368f9d" (UID: "2ba985ba-4069-4084-9893-570b5f368f9d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.290110 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2ba985ba-4069-4084-9893-570b5f368f9d" (UID: "2ba985ba-4069-4084-9893-570b5f368f9d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.311832 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "2ba985ba-4069-4084-9893-570b5f368f9d" (UID: "2ba985ba-4069-4084-9893-570b5f368f9d"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.324075 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ba985ba-4069-4084-9893-570b5f368f9d" (UID: "2ba985ba-4069-4084-9893-570b5f368f9d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.325645 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.342282 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-config-data" (OuterVolumeSpecName: "config-data") pod "2ba985ba-4069-4084-9893-570b5f368f9d" (UID: "2ba985ba-4069-4084-9893-570b5f368f9d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.368614 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqgtl\" (UniqueName: \"kubernetes.io/projected/2ba985ba-4069-4084-9893-570b5f368f9d-kube-api-access-tqgtl\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.368645 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-scripts\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.368657 4622 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.368666 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.368673 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-config-data\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.368681 4622 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ba985ba-4069-4084-9893-570b5f368f9d-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.714567 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62c44e56-3ca0-48fb-8f03-235fee59fa3d" path="/var/lib/kubelet/pods/62c44e56-3ca0-48fb-8f03-235fee59fa3d/volumes"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.716731 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Nov 26 11:29:04 crc kubenswrapper[4622]: W1126 11:29:04.718530 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2723fe0a_e74f_4934_8574_0b958246ee9f.slice/crio-98507c1489da54e74d1a7ce8fb32300cf6acfa28530c63daf2e90c6f61e0efa0 WatchSource:0}: Error finding container 98507c1489da54e74d1a7ce8fb32300cf6acfa28530c63daf2e90c6f61e0efa0: Status 404 returned error can't find the container with id 98507c1489da54e74d1a7ce8fb32300cf6acfa28530c63daf2e90c6f61e0efa0
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.921782 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2723fe0a-e74f-4934-8574-0b958246ee9f","Type":"ContainerStarted","Data":"7a16e280425e7d7fb90ffd2873bcf880459fa253cf61242440ae33db33902f0a"}
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.921854 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2723fe0a-e74f-4934-8574-0b958246ee9f","Type":"ContainerStarted","Data":"98507c1489da54e74d1a7ce8fb32300cf6acfa28530c63daf2e90c6f61e0efa0"}
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.924635 4622 generic.go:334] "Generic (PLEG): container finished" podID="2ba985ba-4069-4084-9893-570b5f368f9d" containerID="fdb5457cc034539894799e111a0d81fe091a441a3da67a06639a081329eacded" exitCode=0
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.924713 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.924721 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2ba985ba-4069-4084-9893-570b5f368f9d","Type":"ContainerDied","Data":"fdb5457cc034539894799e111a0d81fe091a441a3da67a06639a081329eacded"}
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.924755 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2ba985ba-4069-4084-9893-570b5f368f9d","Type":"ContainerDied","Data":"15393f63233c04bb8917976eeb91f2b1bdd25696677121d5100e22edec243b8b"}
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.924774 4622 scope.go:117] "RemoveContainer" containerID="e4998ae83ea42a2c3032a8c6728541f47d36786eaa4aff9ecda539fe7833475a"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.956986 4622 scope.go:117] "RemoveContainer" containerID="62214bfcc418a79e6a0072c2c680cdbad54b7f5c7a1e59f4d7b0bed9929b951f"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.976209 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.980284 4622 scope.go:117] "RemoveContainer" containerID="384f371803367002f603ed8702c4a46581d03034afe293b88a7e035542329c5e"
Nov 26 11:29:04 crc kubenswrapper[4622]: I1126 11:29:04.982919 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.003884 4622 scope.go:117] "RemoveContainer" containerID="fdb5457cc034539894799e111a0d81fe091a441a3da67a06639a081329eacded"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.008575 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Nov 26 11:29:05 crc kubenswrapper[4622]: E1126 11:29:05.008983 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="ceilometer-central-agent"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.009003 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="ceilometer-central-agent"
Nov 26 11:29:05 crc kubenswrapper[4622]: E1126 11:29:05.009012 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="ceilometer-notification-agent"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.009017 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="ceilometer-notification-agent"
Nov 26 11:29:05 crc kubenswrapper[4622]: E1126 11:29:05.009027 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="sg-core"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.009032 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="sg-core"
Nov 26 11:29:05 crc kubenswrapper[4622]: E1126 11:29:05.009061 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="proxy-httpd"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.009066 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="proxy-httpd"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.009541 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="sg-core"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.009566 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="ceilometer-central-agent"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.009591 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="proxy-httpd"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.009605 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" containerName="ceilometer-notification-agent"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.019098 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.021217 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.021257 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.021973 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.027603 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.029671 4622 scope.go:117] "RemoveContainer" containerID="e4998ae83ea42a2c3032a8c6728541f47d36786eaa4aff9ecda539fe7833475a"
Nov 26 11:29:05 crc kubenswrapper[4622]: E1126 11:29:05.031025 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4998ae83ea42a2c3032a8c6728541f47d36786eaa4aff9ecda539fe7833475a\": container with ID starting with e4998ae83ea42a2c3032a8c6728541f47d36786eaa4aff9ecda539fe7833475a not found: ID does not exist" containerID="e4998ae83ea42a2c3032a8c6728541f47d36786eaa4aff9ecda539fe7833475a"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.031067 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4998ae83ea42a2c3032a8c6728541f47d36786eaa4aff9ecda539fe7833475a"} err="failed to get container status \"e4998ae83ea42a2c3032a8c6728541f47d36786eaa4aff9ecda539fe7833475a\": rpc error: code = NotFound desc = could not find container \"e4998ae83ea42a2c3032a8c6728541f47d36786eaa4aff9ecda539fe7833475a\": container with ID starting with e4998ae83ea42a2c3032a8c6728541f47d36786eaa4aff9ecda539fe7833475a not found: ID does not exist"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.031106 4622 scope.go:117] "RemoveContainer" containerID="62214bfcc418a79e6a0072c2c680cdbad54b7f5c7a1e59f4d7b0bed9929b951f"
Nov 26 11:29:05 crc kubenswrapper[4622]: E1126 11:29:05.031479 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62214bfcc418a79e6a0072c2c680cdbad54b7f5c7a1e59f4d7b0bed9929b951f\": container with ID starting with 62214bfcc418a79e6a0072c2c680cdbad54b7f5c7a1e59f4d7b0bed9929b951f not found: ID does not exist" containerID="62214bfcc418a79e6a0072c2c680cdbad54b7f5c7a1e59f4d7b0bed9929b951f"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.031554 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62214bfcc418a79e6a0072c2c680cdbad54b7f5c7a1e59f4d7b0bed9929b951f"} err="failed to get container status \"62214bfcc418a79e6a0072c2c680cdbad54b7f5c7a1e59f4d7b0bed9929b951f\": rpc error: code = NotFound desc = could not find container \"62214bfcc418a79e6a0072c2c680cdbad54b7f5c7a1e59f4d7b0bed9929b951f\": container with ID starting with 62214bfcc418a79e6a0072c2c680cdbad54b7f5c7a1e59f4d7b0bed9929b951f not found: ID does not exist"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.031581 4622 scope.go:117] "RemoveContainer" containerID="384f371803367002f603ed8702c4a46581d03034afe293b88a7e035542329c5e"
Nov 26 11:29:05 crc kubenswrapper[4622]: E1126 11:29:05.031941 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"384f371803367002f603ed8702c4a46581d03034afe293b88a7e035542329c5e\": container with ID starting with 384f371803367002f603ed8702c4a46581d03034afe293b88a7e035542329c5e not found: ID does not exist" containerID="384f371803367002f603ed8702c4a46581d03034afe293b88a7e035542329c5e"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.031974 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"384f371803367002f603ed8702c4a46581d03034afe293b88a7e035542329c5e"} err="failed to get container status \"384f371803367002f603ed8702c4a46581d03034afe293b88a7e035542329c5e\": rpc error: code = NotFound desc = could not find container \"384f371803367002f603ed8702c4a46581d03034afe293b88a7e035542329c5e\": container with ID starting with 384f371803367002f603ed8702c4a46581d03034afe293b88a7e035542329c5e not found: ID does not exist"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.031989 4622 scope.go:117] "RemoveContainer" containerID="fdb5457cc034539894799e111a0d81fe091a441a3da67a06639a081329eacded"
Nov 26 11:29:05 crc kubenswrapper[4622]: E1126 11:29:05.032225 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdb5457cc034539894799e111a0d81fe091a441a3da67a06639a081329eacded\": container with ID starting with fdb5457cc034539894799e111a0d81fe091a441a3da67a06639a081329eacded not found: ID does not exist" containerID="fdb5457cc034539894799e111a0d81fe091a441a3da67a06639a081329eacded"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.032242 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdb5457cc034539894799e111a0d81fe091a441a3da67a06639a081329eacded"} err="failed to get container status \"fdb5457cc034539894799e111a0d81fe091a441a3da67a06639a081329eacded\": rpc error: code = NotFound desc = could not find container \"fdb5457cc034539894799e111a0d81fe091a441a3da67a06639a081329eacded\": container with ID starting with fdb5457cc034539894799e111a0d81fe091a441a3da67a06639a081329eacded not found: ID does not exist"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.082921 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.083126 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.083326 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-config-data\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.083446 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg94r\" (UniqueName: \"kubernetes.io/projected/45efa27c-c104-4e86-8283-eac845b5fdd1-kube-api-access-sg94r\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.083564 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45efa27c-c104-4e86-8283-eac845b5fdd1-run-httpd\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.083681 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-scripts\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.083757 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.083898 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45efa27c-c104-4e86-8283-eac845b5fdd1-log-httpd\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.186430 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.186626 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45efa27c-c104-4e86-8283-eac845b5fdd1-log-httpd\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.187155 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45efa27c-c104-4e86-8283-eac845b5fdd1-log-httpd\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.187301 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.187332 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.187901 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-config-data\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.188007 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg94r\" (UniqueName: \"kubernetes.io/projected/45efa27c-c104-4e86-8283-eac845b5fdd1-kube-api-access-sg94r\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.188076 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45efa27c-c104-4e86-8283-eac845b5fdd1-run-httpd\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.188144 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-scripts\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.188789 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45efa27c-c104-4e86-8283-eac845b5fdd1-run-httpd\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.190961 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.190975 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.191332 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.191413 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-config-data\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.199655 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-scripts\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.201583 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg94r\" (UniqueName: \"kubernetes.io/projected/45efa27c-c104-4e86-8283-eac845b5fdd1-kube-api-access-sg94r\") pod \"ceilometer-0\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.358925 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.742193 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.935172 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2723fe0a-e74f-4934-8574-0b958246ee9f","Type":"ContainerStarted","Data":"75f73c342982c06899fbbd556d8b07e0acf561ab747d8ad0f51583d3779de9ba"}
Nov 26 11:29:05 crc kubenswrapper[4622]: I1126 11:29:05.937176 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45efa27c-c104-4e86-8283-eac845b5fdd1","Type":"ContainerStarted","Data":"a8f64ecb8c06ce8ae174938056fa54efbc0dc8b44e6a191df096a0bfa9ccfdca"}
Nov 26 11:29:06 crc kubenswrapper[4622]: I1126 11:29:06.236708 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Nov 26 11:29:06 crc kubenswrapper[4622]: I1126 11:29:06.255447 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Nov 26 11:29:06 crc kubenswrapper[4622]: I1126 11:29:06.255529 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Nov 26 11:29:06 crc kubenswrapper[4622]: I1126 11:29:06.721746 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ba985ba-4069-4084-9893-570b5f368f9d" path="/var/lib/kubelet/pods/2ba985ba-4069-4084-9893-570b5f368f9d/volumes"
Nov 26 11:29:06 crc kubenswrapper[4622]: I1126 11:29:06.947109 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45efa27c-c104-4e86-8283-eac845b5fdd1","Type":"ContainerStarted","Data":"95103bd14ea594d50a21cad687fec99c558b66259b631f5f5b0adb727c217fad"}
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.526686 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-c9b558957-lc98m"
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.543633 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=4.54361014 podStartE2EDuration="4.54361014s" podCreationTimestamp="2025-11-26 11:29:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:29:05.952737191 +0000 UTC m=+1105.543948713" watchObservedRunningTime="2025-11-26 11:29:07.54361014 +0000 UTC m=+1107.134821662"
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.586237 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69494d9f89-vqqzj"]
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.586466 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" podUID="926ada02-c646-4251-b21a-3e341402d619" containerName="dnsmasq-dns" containerID="cri-o://f26ccc070a5e12b7545f55582a479edf470f0145d190be453748689006dd0ad4" gracePeriod=10
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.921130 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69494d9f89-vqqzj"
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.936226 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p76nj\" (UniqueName: \"kubernetes.io/projected/926ada02-c646-4251-b21a-3e341402d619-kube-api-access-p76nj\") pod \"926ada02-c646-4251-b21a-3e341402d619\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") "
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.936692 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-config\") pod \"926ada02-c646-4251-b21a-3e341402d619\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") "
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.937189 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-ovsdbserver-sb\") pod \"926ada02-c646-4251-b21a-3e341402d619\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") "
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.937379 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-dns-svc\") pod \"926ada02-c646-4251-b21a-3e341402d619\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") "
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.937567 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-ovsdbserver-nb\") pod \"926ada02-c646-4251-b21a-3e341402d619\" (UID: \"926ada02-c646-4251-b21a-3e341402d619\") "
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.949810 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/926ada02-c646-4251-b21a-3e341402d619-kube-api-access-p76nj" (OuterVolumeSpecName: "kube-api-access-p76nj") pod "926ada02-c646-4251-b21a-3e341402d619" (UID: "926ada02-c646-4251-b21a-3e341402d619"). InnerVolumeSpecName "kube-api-access-p76nj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.964238 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45efa27c-c104-4e86-8283-eac845b5fdd1","Type":"ContainerStarted","Data":"e916d09526558825d222c440c16e1937aeb6b9cabd4f0a7ed40d2723f6f34312"}
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.966311 4622 generic.go:334] "Generic (PLEG): container finished" podID="926ada02-c646-4251-b21a-3e341402d619" containerID="f26ccc070a5e12b7545f55582a479edf470f0145d190be453748689006dd0ad4" exitCode=0
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.966356 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" event={"ID":"926ada02-c646-4251-b21a-3e341402d619","Type":"ContainerDied","Data":"f26ccc070a5e12b7545f55582a479edf470f0145d190be453748689006dd0ad4"}
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.966378 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69494d9f89-vqqzj" event={"ID":"926ada02-c646-4251-b21a-3e341402d619","Type":"ContainerDied","Data":"2af32bd3ab40d12f5374c13e8562efe834de8d3e3b948f85d71952fb6ca42f1e"}
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.966394 4622 scope.go:117] "RemoveContainer" containerID="f26ccc070a5e12b7545f55582a479edf470f0145d190be453748689006dd0ad4"
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.966481 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69494d9f89-vqqzj"
Nov 26 11:29:07 crc kubenswrapper[4622]: I1126 11:29:07.993607 4622 scope.go:117] "RemoveContainer" containerID="eed67efc1cf030497301bc60cf350e8a1d9b2c92c0e482b0626b03751e747ba9"
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.018957 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "926ada02-c646-4251-b21a-3e341402d619" (UID: "926ada02-c646-4251-b21a-3e341402d619"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.023040 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "926ada02-c646-4251-b21a-3e341402d619" (UID: "926ada02-c646-4251-b21a-3e341402d619"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.023087 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-config" (OuterVolumeSpecName: "config") pod "926ada02-c646-4251-b21a-3e341402d619" (UID: "926ada02-c646-4251-b21a-3e341402d619"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.024057 4622 scope.go:117] "RemoveContainer" containerID="f26ccc070a5e12b7545f55582a479edf470f0145d190be453748689006dd0ad4"
Nov 26 11:29:08 crc kubenswrapper[4622]: E1126 11:29:08.024420 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f26ccc070a5e12b7545f55582a479edf470f0145d190be453748689006dd0ad4\": container with ID starting with f26ccc070a5e12b7545f55582a479edf470f0145d190be453748689006dd0ad4 not found: ID does not exist" containerID="f26ccc070a5e12b7545f55582a479edf470f0145d190be453748689006dd0ad4"
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.024527 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f26ccc070a5e12b7545f55582a479edf470f0145d190be453748689006dd0ad4"} err="failed to get container status \"f26ccc070a5e12b7545f55582a479edf470f0145d190be453748689006dd0ad4\": rpc error: code = NotFound desc = could not find container \"f26ccc070a5e12b7545f55582a479edf470f0145d190be453748689006dd0ad4\": container with ID starting with f26ccc070a5e12b7545f55582a479edf470f0145d190be453748689006dd0ad4 not found: ID does not exist"
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.024608 4622 scope.go:117] "RemoveContainer" containerID="eed67efc1cf030497301bc60cf350e8a1d9b2c92c0e482b0626b03751e747ba9"
Nov 26 11:29:08 crc kubenswrapper[4622]: E1126 11:29:08.024947 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eed67efc1cf030497301bc60cf350e8a1d9b2c92c0e482b0626b03751e747ba9\": container with ID starting with eed67efc1cf030497301bc60cf350e8a1d9b2c92c0e482b0626b03751e747ba9 not found: ID does not exist" containerID="eed67efc1cf030497301bc60cf350e8a1d9b2c92c0e482b0626b03751e747ba9"
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.025029 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eed67efc1cf030497301bc60cf350e8a1d9b2c92c0e482b0626b03751e747ba9"} err="failed to get container status \"eed67efc1cf030497301bc60cf350e8a1d9b2c92c0e482b0626b03751e747ba9\": rpc error: code = NotFound desc = could not find container \"eed67efc1cf030497301bc60cf350e8a1d9b2c92c0e482b0626b03751e747ba9\": container with ID starting with eed67efc1cf030497301bc60cf350e8a1d9b2c92c0e482b0626b03751e747ba9 not found: ID does not exist"
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.031421 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "926ada02-c646-4251-b21a-3e341402d619" (UID: "926ada02-c646-4251-b21a-3e341402d619"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.039680 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p76nj\" (UniqueName: \"kubernetes.io/projected/926ada02-c646-4251-b21a-3e341402d619-kube-api-access-p76nj\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.039707 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-config\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.039718 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.039727 4622 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-dns-svc\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.039735 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/926ada02-c646-4251-b21a-3e341402d619-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.294074 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69494d9f89-vqqzj"]
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.299966 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-69494d9f89-vqqzj"]
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.742539 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="926ada02-c646-4251-b21a-3e341402d619" path="/var/lib/kubelet/pods/926ada02-c646-4251-b21a-3e341402d619/volumes"
Nov 26 11:29:08 crc kubenswrapper[4622]: I1126 11:29:08.979683 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45efa27c-c104-4e86-8283-eac845b5fdd1","Type":"ContainerStarted","Data":"c2045a879e74d7473cea6ffa284e1be102459aa2bcb25c1d3f2e9a6d83953d17"}
Nov 26 11:29:09 crc kubenswrapper[4622]: I1126 11:29:09.992247 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45efa27c-c104-4e86-8283-eac845b5fdd1","Type":"ContainerStarted","Data":"47ba66dbbf222db95772293c6c9a1c2b28887cd56d80ffe89ab430ee56a4f1bf"}
Nov 26 11:29:09 crc kubenswrapper[4622]: I1126 11:29:09.992582 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Nov 26 11:29:11 crc kubenswrapper[4622]: I1126 11:29:11.236899 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0"
Nov 26 11:29:11 crc kubenswrapper[4622]: I1126 11:29:11.254993 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Nov 26 11:29:11 crc kubenswrapper[4622]: I1126 11:29:11.255105 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Nov 26 11:29:11 crc kubenswrapper[4622]: I1126 11:29:11.257453 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0"
Nov 26 11:29:11 crc kubenswrapper[4622]: I1126 11:29:11.284262 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.695712856 podStartE2EDuration="7.284237915s" podCreationTimestamp="2025-11-26 11:29:04 +0000 UTC" firstStartedPulling="2025-11-26 11:29:05.739907013 +0000 UTC m=+1105.331118536" lastFinishedPulling="2025-11-26 11:29:09.328432073 +0000 UTC m=+1108.919643595" observedRunningTime="2025-11-26 11:29:10.036749618 +0000 UTC m=+1109.627961140" watchObservedRunningTime="2025-11-26 11:29:11.284237915 +0000 UTC m=+1110.875449427"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.053137 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.182489 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-cnkv2"]
Nov 26 11:29:12 crc kubenswrapper[4622]: E1126 11:29:12.183135 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="926ada02-c646-4251-b21a-3e341402d619" containerName="init"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.183154 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="926ada02-c646-4251-b21a-3e341402d619" containerName="init"
Nov 26 11:29:12 crc kubenswrapper[4622]: E1126 11:29:12.183172 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="926ada02-c646-4251-b21a-3e341402d619" containerName="dnsmasq-dns"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.183180 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="926ada02-c646-4251-b21a-3e341402d619" containerName="dnsmasq-dns"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.183392 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="926ada02-c646-4251-b21a-3e341402d619" containerName="dnsmasq-dns"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.183988 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.185627 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.186674 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.197634 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-cnkv2"]
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.226793 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-cnkv2\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") " pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.226866 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-scripts\") pod \"nova-cell1-cell-mapping-cnkv2\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") " pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.226968 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-config-data\") pod \"nova-cell1-cell-mapping-cnkv2\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") " pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.226989 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qr2k\" (UniqueName: \"kubernetes.io/projected/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-kube-api-access-7qr2k\") pod \"nova-cell1-cell-mapping-cnkv2\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") " pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.262988 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.186:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.268686 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.186:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.330056 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-cnkv2\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") " pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.330144 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-scripts\") pod \"nova-cell1-cell-mapping-cnkv2\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") " pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.330237 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-config-data\") pod \"nova-cell1-cell-mapping-cnkv2\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") " pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.330263 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qr2k\" (UniqueName: \"kubernetes.io/projected/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-kube-api-access-7qr2k\") pod \"nova-cell1-cell-mapping-cnkv2\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") " pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.337595 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-cnkv2\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") " pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.343246 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-config-data\") pod \"nova-cell1-cell-mapping-cnkv2\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") " pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.343267 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-scripts\") pod \"nova-cell1-cell-mapping-cnkv2\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") " pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.348278 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qr2k\" (UniqueName: \"kubernetes.io/projected/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-kube-api-access-7qr2k\") pod \"nova-cell1-cell-mapping-cnkv2\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") " pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.504878 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:12 crc kubenswrapper[4622]: W1126 11:29:12.957653 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9702ef2d_d47c_4e61_a8cb_c5e2b8ccd03c.slice/crio-0bfb859c9b544a5f57ecc1e6344dd68304e0e2ad764e3a7a373dfa5fa22691d1 WatchSource:0}: Error finding container 0bfb859c9b544a5f57ecc1e6344dd68304e0e2ad764e3a7a373dfa5fa22691d1: Status 404 returned error can't find the container with id 0bfb859c9b544a5f57ecc1e6344dd68304e0e2ad764e3a7a373dfa5fa22691d1
Nov 26 11:29:12 crc kubenswrapper[4622]: I1126 11:29:12.963762 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-cnkv2"]
Nov 26 11:29:13 crc kubenswrapper[4622]: I1126 11:29:13.044684 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-cnkv2" event={"ID":"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c","Type":"ContainerStarted","Data":"0bfb859c9b544a5f57ecc1e6344dd68304e0e2ad764e3a7a373dfa5fa22691d1"}
Nov 26 11:29:14 crc kubenswrapper[4622]: I1126 11:29:14.055179 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-cnkv2" event={"ID":"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c","Type":"ContainerStarted","Data":"c07fcec673f0d8eaafa07191c76371194dfe82e9ed385a67365d44bc8fe67576"}
Nov 26 11:29:14 crc kubenswrapper[4622]: I1126 11:29:14.078330 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-cnkv2" podStartSLOduration=2.07831315 podStartE2EDuration="2.07831315s" podCreationTimestamp="2025-11-26 11:29:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:29:14.068857339 +0000 UTC m=+1113.660068862" watchObservedRunningTime="2025-11-26 11:29:14.07831315 +0000 UTC m=+1113.669524673"
Nov 26 11:29:14 crc kubenswrapper[4622]: I1126 11:29:14.326562 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Nov 26 11:29:14 crc kubenswrapper[4622]: I1126 11:29:14.326611 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Nov 26 11:29:15 crc kubenswrapper[4622]: I1126 11:29:15.199446 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 26 11:29:15 crc kubenswrapper[4622]: I1126 11:29:15.199558 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 26 11:29:15 crc kubenswrapper[4622]: I1126 11:29:15.199627 4622 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k565w"
Nov 26 11:29:15 crc kubenswrapper[4622]: I1126 11:29:15.200496 4622 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c9f0579c71b4854739675c5824d6deebe369961efeefefef06d1fd31216bd3f4"} pod="openshift-machine-config-operator/machine-config-daemon-k565w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 26 11:29:15 crc kubenswrapper[4622]: I1126 11:29:15.200567 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" containerID="cri-o://c9f0579c71b4854739675c5824d6deebe369961efeefefef06d1fd31216bd3f4" gracePeriod=600
Nov 26 11:29:15 crc kubenswrapper[4622]: I1126 11:29:15.337662 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2723fe0a-e74f-4934-8574-0b958246ee9f" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.187:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Nov 26 11:29:15 crc kubenswrapper[4622]: I1126 11:29:15.337683 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2723fe0a-e74f-4934-8574-0b958246ee9f" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.187:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Nov 26 11:29:16 crc kubenswrapper[4622]: I1126 11:29:16.078733 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerID="c9f0579c71b4854739675c5824d6deebe369961efeefefef06d1fd31216bd3f4" exitCode=0
Nov 26 11:29:16 crc kubenswrapper[4622]: I1126 11:29:16.078820 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerDied","Data":"c9f0579c71b4854739675c5824d6deebe369961efeefefef06d1fd31216bd3f4"}
Nov 26 11:29:16 crc kubenswrapper[4622]: I1126 11:29:16.079151 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"8b880948f29e69f7cc8340036aff51e00d5b43fc0f918a98ac809109e6a99119"}
Nov 26 11:29:16 crc kubenswrapper[4622]: I1126 11:29:16.079181 4622 scope.go:117] "RemoveContainer" containerID="d86abbe8387fbedaee53ded7b61aeb7def7973bb53da7bd06534996b89fc85df"
Nov 26 11:29:17 crc kubenswrapper[4622]: I1126 11:29:17.093235 4622 generic.go:334] "Generic (PLEG): container finished" podID="9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c" containerID="c07fcec673f0d8eaafa07191c76371194dfe82e9ed385a67365d44bc8fe67576" exitCode=0
Nov 26 11:29:17 crc kubenswrapper[4622]: I1126 11:29:17.093311 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-cnkv2" event={"ID":"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c","Type":"ContainerDied","Data":"c07fcec673f0d8eaafa07191c76371194dfe82e9ed385a67365d44bc8fe67576"}
Nov 26 11:29:18 crc kubenswrapper[4622]: I1126 11:29:18.395059 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:18 crc kubenswrapper[4622]: I1126 11:29:18.473734 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-config-data\") pod \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") "
Nov 26 11:29:18 crc kubenswrapper[4622]: I1126 11:29:18.473801 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-scripts\") pod \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") "
Nov 26 11:29:18 crc kubenswrapper[4622]: I1126 11:29:18.473854 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qr2k\" (UniqueName: \"kubernetes.io/projected/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-kube-api-access-7qr2k\") pod \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") "
Nov 26 11:29:18 crc kubenswrapper[4622]: I1126 11:29:18.474712 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-combined-ca-bundle\") pod \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\" (UID: \"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c\") "
Nov 26 11:29:18 crc kubenswrapper[4622]: I1126 11:29:18.480332 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-scripts" (OuterVolumeSpecName: "scripts") pod "9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c" (UID: "9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:29:18 crc kubenswrapper[4622]: I1126 11:29:18.480532 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-kube-api-access-7qr2k" (OuterVolumeSpecName: "kube-api-access-7qr2k") pod "9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c" (UID: "9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c"). InnerVolumeSpecName "kube-api-access-7qr2k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:29:18 crc kubenswrapper[4622]: I1126 11:29:18.496443 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-config-data" (OuterVolumeSpecName: "config-data") pod "9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c" (UID: "9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:29:18 crc kubenswrapper[4622]: I1126 11:29:18.499145 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c" (UID: "9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:29:18 crc kubenswrapper[4622]: I1126 11:29:18.578321 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-scripts\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:18 crc kubenswrapper[4622]: I1126 11:29:18.578392 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qr2k\" (UniqueName: \"kubernetes.io/projected/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-kube-api-access-7qr2k\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:18 crc kubenswrapper[4622]: I1126 11:29:18.578407 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:18 crc kubenswrapper[4622]: I1126 11:29:18.578417 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c-config-data\") on node \"crc\" DevicePath \"\""
Nov 26 11:29:19 crc kubenswrapper[4622]: I1126 11:29:19.109583 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-cnkv2" event={"ID":"9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c","Type":"ContainerDied","Data":"0bfb859c9b544a5f57ecc1e6344dd68304e0e2ad764e3a7a373dfa5fa22691d1"}
Nov 26 11:29:19 crc kubenswrapper[4622]: I1126 11:29:19.109636 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0bfb859c9b544a5f57ecc1e6344dd68304e0e2ad764e3a7a373dfa5fa22691d1"
Nov 26 11:29:19 crc kubenswrapper[4622]: I1126 11:29:19.109702 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-cnkv2"
Nov 26 11:29:19 crc kubenswrapper[4622]: I1126 11:29:19.314536 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Nov 26 11:29:19 crc kubenswrapper[4622]: I1126 11:29:19.314816 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2723fe0a-e74f-4934-8574-0b958246ee9f" containerName="nova-api-log" containerID="cri-o://7a16e280425e7d7fb90ffd2873bcf880459fa253cf61242440ae33db33902f0a" gracePeriod=30
Nov 26 11:29:19 crc kubenswrapper[4622]: I1126 11:29:19.314906 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2723fe0a-e74f-4934-8574-0b958246ee9f" containerName="nova-api-api" containerID="cri-o://75f73c342982c06899fbbd556d8b07e0acf561ab747d8ad0f51583d3779de9ba" gracePeriod=30
Nov 26 11:29:19 crc kubenswrapper[4622]: I1126 11:29:19.324020 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 26 11:29:19 crc kubenswrapper[4622]: I1126 11:29:19.324206 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="e0264e46-87bd-4327-a62b-a71c1af8c806" containerName="nova-scheduler-scheduler" containerID="cri-o://24c8df9b9b22ded5c1f06aea78445cf4bbe7e128044207264dd8728949284fd9" gracePeriod=30
Nov 26 11:29:19 crc kubenswrapper[4622]: I1126 11:29:19.346259 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Nov 26 11:29:19 crc kubenswrapper[4622]: I1126 11:29:19.346536 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603"
containerName="nova-metadata-log" containerID="cri-o://11d69da12cf59f6864db6dcedde0cc628082044bdcd91cc7b83a3e25ebacb47e" gracePeriod=30 Nov 26 11:29:19 crc kubenswrapper[4622]: I1126 11:29:19.346615 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" containerName="nova-metadata-metadata" containerID="cri-o://7041ca0552da59d13f47875f7b4ec29849434631880f98dee3e312f98d156978" gracePeriod=30 Nov 26 11:29:20 crc kubenswrapper[4622]: I1126 11:29:20.119078 4622 generic.go:334] "Generic (PLEG): container finished" podID="5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" containerID="11d69da12cf59f6864db6dcedde0cc628082044bdcd91cc7b83a3e25ebacb47e" exitCode=143 Nov 26 11:29:20 crc kubenswrapper[4622]: I1126 11:29:20.119155 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603","Type":"ContainerDied","Data":"11d69da12cf59f6864db6dcedde0cc628082044bdcd91cc7b83a3e25ebacb47e"} Nov 26 11:29:20 crc kubenswrapper[4622]: I1126 11:29:20.124384 4622 generic.go:334] "Generic (PLEG): container finished" podID="2723fe0a-e74f-4934-8574-0b958246ee9f" containerID="7a16e280425e7d7fb90ffd2873bcf880459fa253cf61242440ae33db33902f0a" exitCode=143 Nov 26 11:29:20 crc kubenswrapper[4622]: I1126 11:29:20.124428 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2723fe0a-e74f-4934-8574-0b958246ee9f","Type":"ContainerDied","Data":"7a16e280425e7d7fb90ffd2873bcf880459fa253cf61242440ae33db33902f0a"} Nov 26 11:29:21 crc kubenswrapper[4622]: I1126 11:29:21.456589 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 26 11:29:21 crc kubenswrapper[4622]: I1126 11:29:21.545810 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0264e46-87bd-4327-a62b-a71c1af8c806-combined-ca-bundle\") pod \"e0264e46-87bd-4327-a62b-a71c1af8c806\" (UID: \"e0264e46-87bd-4327-a62b-a71c1af8c806\") " Nov 26 11:29:21 crc kubenswrapper[4622]: I1126 11:29:21.546204 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0264e46-87bd-4327-a62b-a71c1af8c806-config-data\") pod \"e0264e46-87bd-4327-a62b-a71c1af8c806\" (UID: \"e0264e46-87bd-4327-a62b-a71c1af8c806\") " Nov 26 11:29:21 crc kubenswrapper[4622]: I1126 11:29:21.546264 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9wqn\" (UniqueName: \"kubernetes.io/projected/e0264e46-87bd-4327-a62b-a71c1af8c806-kube-api-access-z9wqn\") pod \"e0264e46-87bd-4327-a62b-a71c1af8c806\" (UID: \"e0264e46-87bd-4327-a62b-a71c1af8c806\") " Nov 26 11:29:21 crc kubenswrapper[4622]: I1126 11:29:21.551876 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0264e46-87bd-4327-a62b-a71c1af8c806-kube-api-access-z9wqn" (OuterVolumeSpecName: "kube-api-access-z9wqn") pod "e0264e46-87bd-4327-a62b-a71c1af8c806" (UID: "e0264e46-87bd-4327-a62b-a71c1af8c806"). InnerVolumeSpecName "kube-api-access-z9wqn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:29:21 crc kubenswrapper[4622]: I1126 11:29:21.573051 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0264e46-87bd-4327-a62b-a71c1af8c806-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e0264e46-87bd-4327-a62b-a71c1af8c806" (UID: "e0264e46-87bd-4327-a62b-a71c1af8c806"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:29:21 crc kubenswrapper[4622]: I1126 11:29:21.575798 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0264e46-87bd-4327-a62b-a71c1af8c806-config-data" (OuterVolumeSpecName: "config-data") pod "e0264e46-87bd-4327-a62b-a71c1af8c806" (UID: "e0264e46-87bd-4327-a62b-a71c1af8c806"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:29:21 crc kubenswrapper[4622]: I1126 11:29:21.648557 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0264e46-87bd-4327-a62b-a71c1af8c806-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:21 crc kubenswrapper[4622]: I1126 11:29:21.648593 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9wqn\" (UniqueName: \"kubernetes.io/projected/e0264e46-87bd-4327-a62b-a71c1af8c806-kube-api-access-z9wqn\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:21 crc kubenswrapper[4622]: I1126 11:29:21.648605 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0264e46-87bd-4327-a62b-a71c1af8c806-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.146977 4622 generic.go:334] "Generic (PLEG): container finished" podID="e0264e46-87bd-4327-a62b-a71c1af8c806" containerID="24c8df9b9b22ded5c1f06aea78445cf4bbe7e128044207264dd8728949284fd9" exitCode=0 Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.147045 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e0264e46-87bd-4327-a62b-a71c1af8c806","Type":"ContainerDied","Data":"24c8df9b9b22ded5c1f06aea78445cf4bbe7e128044207264dd8728949284fd9"} Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.147101 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e0264e46-87bd-4327-a62b-a71c1af8c806","Type":"ContainerDied","Data":"3815e045e6d7d46cbb9c648cfc62220742c76b87697cc6756ccd37acba5b6e50"} Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.147096 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.147126 4622 scope.go:117] "RemoveContainer" containerID="24c8df9b9b22ded5c1f06aea78445cf4bbe7e128044207264dd8728949284fd9" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.176350 4622 scope.go:117] "RemoveContainer" containerID="24c8df9b9b22ded5c1f06aea78445cf4bbe7e128044207264dd8728949284fd9" Nov 26 11:29:22 crc kubenswrapper[4622]: E1126 11:29:22.176783 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24c8df9b9b22ded5c1f06aea78445cf4bbe7e128044207264dd8728949284fd9\": container with ID starting with 24c8df9b9b22ded5c1f06aea78445cf4bbe7e128044207264dd8728949284fd9 not found: ID does not exist" containerID="24c8df9b9b22ded5c1f06aea78445cf4bbe7e128044207264dd8728949284fd9" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.176828 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24c8df9b9b22ded5c1f06aea78445cf4bbe7e128044207264dd8728949284fd9"} err="failed to get container status \"24c8df9b9b22ded5c1f06aea78445cf4bbe7e128044207264dd8728949284fd9\": rpc error: code = NotFound desc = could not find container \"24c8df9b9b22ded5c1f06aea78445cf4bbe7e128044207264dd8728949284fd9\": container with ID starting with 24c8df9b9b22ded5c1f06aea78445cf4bbe7e128044207264dd8728949284fd9 not found: ID does not exist" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.178726 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.185817 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.195929 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 26 11:29:22 crc kubenswrapper[4622]: E1126 11:29:22.196305 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c" containerName="nova-manage" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.196324 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c" containerName="nova-manage" Nov 26 11:29:22 crc kubenswrapper[4622]: E1126 11:29:22.196342 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0264e46-87bd-4327-a62b-a71c1af8c806" containerName="nova-scheduler-scheduler" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.196348 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0264e46-87bd-4327-a62b-a71c1af8c806" containerName="nova-scheduler-scheduler" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.196602 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c" containerName="nova-manage" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.196621 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0264e46-87bd-4327-a62b-a71c1af8c806" containerName="nova-scheduler-scheduler" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.197441 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.199900 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.205530 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.262933 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7\") " pod="openstack/nova-scheduler-0" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.263082 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7-config-data\") pod \"nova-scheduler-0\" (UID: \"e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7\") " pod="openstack/nova-scheduler-0" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.263234 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bgmz\" (UniqueName: \"kubernetes.io/projected/e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7-kube-api-access-7bgmz\") pod \"nova-scheduler-0\" (UID: \"e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7\") " pod="openstack/nova-scheduler-0" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.365616 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7-config-data\") pod \"nova-scheduler-0\" (UID: \"e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7\") " pod="openstack/nova-scheduler-0" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.365781 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bgmz\" (UniqueName: \"kubernetes.io/projected/e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7-kube-api-access-7bgmz\") pod \"nova-scheduler-0\" (UID: \"e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7\") " pod="openstack/nova-scheduler-0" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.365920 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7\") " pod="openstack/nova-scheduler-0" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.370034 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7\") " pod="openstack/nova-scheduler-0" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.370308 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7-config-data\") pod \"nova-scheduler-0\" (UID: \"e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7\") " pod="openstack/nova-scheduler-0" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.380978 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bgmz\" (UniqueName: 
\"kubernetes.io/projected/e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7-kube-api-access-7bgmz\") pod \"nova-scheduler-0\" (UID: \"e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7\") " pod="openstack/nova-scheduler-0" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.522662 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.736212 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0264e46-87bd-4327-a62b-a71c1af8c806" path="/var/lib/kubelet/pods/e0264e46-87bd-4327-a62b-a71c1af8c806/volumes" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.864278 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.979699 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-public-tls-certs\") pod \"2723fe0a-e74f-4934-8574-0b958246ee9f\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.979873 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-combined-ca-bundle\") pod \"2723fe0a-e74f-4934-8574-0b958246ee9f\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.980024 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-internal-tls-certs\") pod \"2723fe0a-e74f-4934-8574-0b958246ee9f\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.980580 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwmpx\" (UniqueName: \"kubernetes.io/projected/2723fe0a-e74f-4934-8574-0b958246ee9f-kube-api-access-vwmpx\") pod \"2723fe0a-e74f-4934-8574-0b958246ee9f\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.980677 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2723fe0a-e74f-4934-8574-0b958246ee9f-logs\") pod \"2723fe0a-e74f-4934-8574-0b958246ee9f\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.980744 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-config-data\") pod \"2723fe0a-e74f-4934-8574-0b958246ee9f\" (UID: \"2723fe0a-e74f-4934-8574-0b958246ee9f\") " Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.981610 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2723fe0a-e74f-4934-8574-0b958246ee9f-logs" (OuterVolumeSpecName: "logs") pod "2723fe0a-e74f-4934-8574-0b958246ee9f" (UID: "2723fe0a-e74f-4934-8574-0b958246ee9f"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.982342 4622 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2723fe0a-e74f-4934-8574-0b958246ee9f-logs\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.986598 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2723fe0a-e74f-4934-8574-0b958246ee9f-kube-api-access-vwmpx" (OuterVolumeSpecName: "kube-api-access-vwmpx") pod "2723fe0a-e74f-4934-8574-0b958246ee9f" (UID: "2723fe0a-e74f-4934-8574-0b958246ee9f"). InnerVolumeSpecName "kube-api-access-vwmpx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:29:22 crc kubenswrapper[4622]: I1126 11:29:22.986790 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.015829 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-config-data" (OuterVolumeSpecName: "config-data") pod "2723fe0a-e74f-4934-8574-0b958246ee9f" (UID: "2723fe0a-e74f-4934-8574-0b958246ee9f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.017088 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2723fe0a-e74f-4934-8574-0b958246ee9f" (UID: "2723fe0a-e74f-4934-8574-0b958246ee9f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.029320 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2723fe0a-e74f-4934-8574-0b958246ee9f" (UID: "2723fe0a-e74f-4934-8574-0b958246ee9f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.039229 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2723fe0a-e74f-4934-8574-0b958246ee9f" (UID: "2723fe0a-e74f-4934-8574-0b958246ee9f"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.083744 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-nova-metadata-tls-certs\") pod \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.083961 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-config-data\") pod \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.084008 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-combined-ca-bundle\") pod \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.084042 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-logs\") pod \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.084061 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92ltf\" (UniqueName: \"kubernetes.io/projected/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-kube-api-access-92ltf\") pod \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\" (UID: \"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603\") " Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.084424 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-logs" (OuterVolumeSpecName: "logs") pod "5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" (UID: "5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.084911 4622 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.084939 4622 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-logs\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.084952 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.084961 4622 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.084972 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwmpx\" (UniqueName: \"kubernetes.io/projected/2723fe0a-e74f-4934-8574-0b958246ee9f-kube-api-access-vwmpx\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.084985 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2723fe0a-e74f-4934-8574-0b958246ee9f-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.086995 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-kube-api-access-92ltf" (OuterVolumeSpecName: "kube-api-access-92ltf") pod "5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" (UID: "5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603"). InnerVolumeSpecName "kube-api-access-92ltf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.102767 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-config-data" (OuterVolumeSpecName: "config-data") pod "5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" (UID: "5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.103363 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" (UID: "5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.117056 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" (UID: "5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.162485 4622 generic.go:334] "Generic (PLEG): container finished" podID="5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" containerID="7041ca0552da59d13f47875f7b4ec29849434631880f98dee3e312f98d156978" exitCode=0 Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.162715 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.162826 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603","Type":"ContainerDied","Data":"7041ca0552da59d13f47875f7b4ec29849434631880f98dee3e312f98d156978"} Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.162894 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603","Type":"ContainerDied","Data":"b299bf3ac6c0864941a9926d065f2d8504f065ab336e701ea3e1f9114b759fb2"} Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.162919 4622 scope.go:117] "RemoveContainer" containerID="7041ca0552da59d13f47875f7b4ec29849434631880f98dee3e312f98d156978" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.169246 4622 generic.go:334] "Generic (PLEG): container finished" podID="2723fe0a-e74f-4934-8574-0b958246ee9f" containerID="75f73c342982c06899fbbd556d8b07e0acf561ab747d8ad0f51583d3779de9ba" exitCode=0 Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.169480 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.169537 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2723fe0a-e74f-4934-8574-0b958246ee9f","Type":"ContainerDied","Data":"75f73c342982c06899fbbd556d8b07e0acf561ab747d8ad0f51583d3779de9ba"} Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.169630 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2723fe0a-e74f-4934-8574-0b958246ee9f","Type":"ContainerDied","Data":"98507c1489da54e74d1a7ce8fb32300cf6acfa28530c63daf2e90c6f61e0efa0"} Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.180237 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.187873 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.187924 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.187939 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92ltf\" (UniqueName: \"kubernetes.io/projected/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-kube-api-access-92ltf\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.187951 4622 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 26 11:29:23 crc 
kubenswrapper[4622]: I1126 11:29:23.190476 4622 scope.go:117] "RemoveContainer" containerID="11d69da12cf59f6864db6dcedde0cc628082044bdcd91cc7b83a3e25ebacb47e" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.211170 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.216554 4622 scope.go:117] "RemoveContainer" containerID="7041ca0552da59d13f47875f7b4ec29849434631880f98dee3e312f98d156978" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.220780 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 26 11:29:23 crc kubenswrapper[4622]: E1126 11:29:23.226647 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7041ca0552da59d13f47875f7b4ec29849434631880f98dee3e312f98d156978\": container with ID starting with 7041ca0552da59d13f47875f7b4ec29849434631880f98dee3e312f98d156978 not found: ID does not exist" containerID="7041ca0552da59d13f47875f7b4ec29849434631880f98dee3e312f98d156978" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.226690 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7041ca0552da59d13f47875f7b4ec29849434631880f98dee3e312f98d156978"} err="failed to get container status \"7041ca0552da59d13f47875f7b4ec29849434631880f98dee3e312f98d156978\": rpc error: code = NotFound desc = could not find container \"7041ca0552da59d13f47875f7b4ec29849434631880f98dee3e312f98d156978\": container with ID starting with 7041ca0552da59d13f47875f7b4ec29849434631880f98dee3e312f98d156978 not found: ID does not exist" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.226721 4622 scope.go:117] "RemoveContainer" containerID="11d69da12cf59f6864db6dcedde0cc628082044bdcd91cc7b83a3e25ebacb47e" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.229410 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 26 11:29:23 crc kubenswrapper[4622]: E1126 11:29:23.229760 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11d69da12cf59f6864db6dcedde0cc628082044bdcd91cc7b83a3e25ebacb47e\": container with ID starting with 11d69da12cf59f6864db6dcedde0cc628082044bdcd91cc7b83a3e25ebacb47e not found: ID does not exist" containerID="11d69da12cf59f6864db6dcedde0cc628082044bdcd91cc7b83a3e25ebacb47e" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.229802 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11d69da12cf59f6864db6dcedde0cc628082044bdcd91cc7b83a3e25ebacb47e"} err="failed to get container status \"11d69da12cf59f6864db6dcedde0cc628082044bdcd91cc7b83a3e25ebacb47e\": rpc error: code = NotFound desc = could not find container \"11d69da12cf59f6864db6dcedde0cc628082044bdcd91cc7b83a3e25ebacb47e\": container with ID starting with 11d69da12cf59f6864db6dcedde0cc628082044bdcd91cc7b83a3e25ebacb47e not found: ID does not exist" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.229831 4622 scope.go:117] "RemoveContainer" containerID="75f73c342982c06899fbbd556d8b07e0acf561ab747d8ad0f51583d3779de9ba" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.238863 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 26 11:29:23 crc kubenswrapper[4622]: E1126 11:29:23.239250 4622 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" containerName="nova-metadata-log" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.239265 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" containerName="nova-metadata-log" Nov 26 11:29:23 crc kubenswrapper[4622]: E1126 11:29:23.239279 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2723fe0a-e74f-4934-8574-0b958246ee9f" containerName="nova-api-log" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.239284 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="2723fe0a-e74f-4934-8574-0b958246ee9f" containerName="nova-api-log" Nov 26 11:29:23 crc kubenswrapper[4622]: E1126 11:29:23.239307 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" containerName="nova-metadata-metadata" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.239314 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" containerName="nova-metadata-metadata" Nov 26 11:29:23 crc kubenswrapper[4622]: E1126 11:29:23.239327 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2723fe0a-e74f-4934-8574-0b958246ee9f" containerName="nova-api-api" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.239332 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="2723fe0a-e74f-4934-8574-0b958246ee9f" containerName="nova-api-api" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.239573 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" containerName="nova-metadata-metadata" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.239583 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" containerName="nova-metadata-log" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.239606 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="2723fe0a-e74f-4934-8574-0b958246ee9f" containerName="nova-api-api" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.239615 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="2723fe0a-e74f-4934-8574-0b958246ee9f" containerName="nova-api-log" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.240645 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.242855 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.243079 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.251558 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.259847 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.268030 4622 scope.go:117] "RemoveContainer" containerID="7a16e280425e7d7fb90ffd2873bcf880459fa253cf61242440ae33db33902f0a" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.281751 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.283441 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.288737 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.289860 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.290122 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.290262 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.291745 4622 scope.go:117] "RemoveContainer" containerID="75f73c342982c06899fbbd556d8b07e0acf561ab747d8ad0f51583d3779de9ba" Nov 26 11:29:23 crc kubenswrapper[4622]: E1126 11:29:23.292475 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75f73c342982c06899fbbd556d8b07e0acf561ab747d8ad0f51583d3779de9ba\": container with ID starting with 75f73c342982c06899fbbd556d8b07e0acf561ab747d8ad0f51583d3779de9ba not found: ID does not exist" containerID="75f73c342982c06899fbbd556d8b07e0acf561ab747d8ad0f51583d3779de9ba" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.292527 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75f73c342982c06899fbbd556d8b07e0acf561ab747d8ad0f51583d3779de9ba"} err="failed to get container status \"75f73c342982c06899fbbd556d8b07e0acf561ab747d8ad0f51583d3779de9ba\": rpc error: code = NotFound desc = could not find container \"75f73c342982c06899fbbd556d8b07e0acf561ab747d8ad0f51583d3779de9ba\": container with ID starting with 75f73c342982c06899fbbd556d8b07e0acf561ab747d8ad0f51583d3779de9ba not found: ID does not exist" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.292552 4622 scope.go:117] "RemoveContainer" containerID="7a16e280425e7d7fb90ffd2873bcf880459fa253cf61242440ae33db33902f0a" Nov 26 11:29:23 crc kubenswrapper[4622]: E1126 11:29:23.294726 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a16e280425e7d7fb90ffd2873bcf880459fa253cf61242440ae33db33902f0a\": container with ID starting with 7a16e280425e7d7fb90ffd2873bcf880459fa253cf61242440ae33db33902f0a not found: ID does not exist" containerID="7a16e280425e7d7fb90ffd2873bcf880459fa253cf61242440ae33db33902f0a" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.294759 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a16e280425e7d7fb90ffd2873bcf880459fa253cf61242440ae33db33902f0a"} err="failed to get container status \"7a16e280425e7d7fb90ffd2873bcf880459fa253cf61242440ae33db33902f0a\": rpc error: code = NotFound desc = could not find container \"7a16e280425e7d7fb90ffd2873bcf880459fa253cf61242440ae33db33902f0a\": container with ID starting with 7a16e280425e7d7fb90ffd2873bcf880459fa253cf61242440ae33db33902f0a not found: ID does not exist" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.390969 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f079e6de-650d-45de-9ee0-bf7cc94c67b3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f079e6de-650d-45de-9ee0-bf7cc94c67b3\") " 
pod="openstack/nova-metadata-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.391211 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtmft\" (UniqueName: \"kubernetes.io/projected/ca80895b-9518-45be-8896-06591e9356b2-kube-api-access-rtmft\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.391281 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca80895b-9518-45be-8896-06591e9356b2-public-tls-certs\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.391315 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cs6bv\" (UniqueName: \"kubernetes.io/projected/f079e6de-650d-45de-9ee0-bf7cc94c67b3-kube-api-access-cs6bv\") pod \"nova-metadata-0\" (UID: \"f079e6de-650d-45de-9ee0-bf7cc94c67b3\") " pod="openstack/nova-metadata-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.391339 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f079e6de-650d-45de-9ee0-bf7cc94c67b3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f079e6de-650d-45de-9ee0-bf7cc94c67b3\") " pod="openstack/nova-metadata-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.391425 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca80895b-9518-45be-8896-06591e9356b2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.391475 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f079e6de-650d-45de-9ee0-bf7cc94c67b3-config-data\") pod \"nova-metadata-0\" (UID: \"f079e6de-650d-45de-9ee0-bf7cc94c67b3\") " pod="openstack/nova-metadata-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.391498 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca80895b-9518-45be-8896-06591e9356b2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.391571 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca80895b-9518-45be-8896-06591e9356b2-logs\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.391602 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f079e6de-650d-45de-9ee0-bf7cc94c67b3-logs\") pod \"nova-metadata-0\" (UID: \"f079e6de-650d-45de-9ee0-bf7cc94c67b3\") " pod="openstack/nova-metadata-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.391725 4622 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca80895b-9518-45be-8896-06591e9356b2-config-data\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.494097 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca80895b-9518-45be-8896-06591e9356b2-logs\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.494150 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f079e6de-650d-45de-9ee0-bf7cc94c67b3-logs\") pod \"nova-metadata-0\" (UID: \"f079e6de-650d-45de-9ee0-bf7cc94c67b3\") " pod="openstack/nova-metadata-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.494317 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca80895b-9518-45be-8896-06591e9356b2-config-data\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.494414 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f079e6de-650d-45de-9ee0-bf7cc94c67b3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f079e6de-650d-45de-9ee0-bf7cc94c67b3\") " pod="openstack/nova-metadata-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.494447 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtmft\" (UniqueName: \"kubernetes.io/projected/ca80895b-9518-45be-8896-06591e9356b2-kube-api-access-rtmft\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.494555 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca80895b-9518-45be-8896-06591e9356b2-public-tls-certs\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.494607 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cs6bv\" (UniqueName: \"kubernetes.io/projected/f079e6de-650d-45de-9ee0-bf7cc94c67b3-kube-api-access-cs6bv\") pod \"nova-metadata-0\" (UID: \"f079e6de-650d-45de-9ee0-bf7cc94c67b3\") " pod="openstack/nova-metadata-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.494642 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f079e6de-650d-45de-9ee0-bf7cc94c67b3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f079e6de-650d-45de-9ee0-bf7cc94c67b3\") " pod="openstack/nova-metadata-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.494691 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca80895b-9518-45be-8896-06591e9356b2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0" Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.494773 
4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f079e6de-650d-45de-9ee0-bf7cc94c67b3-config-data\") pod \"nova-metadata-0\" (UID: \"f079e6de-650d-45de-9ee0-bf7cc94c67b3\") " pod="openstack/nova-metadata-0"
Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.494803 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca80895b-9518-45be-8896-06591e9356b2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0"
Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.495175 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f079e6de-650d-45de-9ee0-bf7cc94c67b3-logs\") pod \"nova-metadata-0\" (UID: \"f079e6de-650d-45de-9ee0-bf7cc94c67b3\") " pod="openstack/nova-metadata-0"
Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.495202 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca80895b-9518-45be-8896-06591e9356b2-logs\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0"
Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.499139 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca80895b-9518-45be-8896-06591e9356b2-public-tls-certs\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0"
Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.499715 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f079e6de-650d-45de-9ee0-bf7cc94c67b3-config-data\") pod \"nova-metadata-0\" (UID: \"f079e6de-650d-45de-9ee0-bf7cc94c67b3\") " pod="openstack/nova-metadata-0"
Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.501704 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca80895b-9518-45be-8896-06591e9356b2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0"
Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.509832 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f079e6de-650d-45de-9ee0-bf7cc94c67b3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f079e6de-650d-45de-9ee0-bf7cc94c67b3\") " pod="openstack/nova-metadata-0"
Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.511010 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca80895b-9518-45be-8896-06591e9356b2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0"
Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.511285 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca80895b-9518-45be-8896-06591e9356b2-config-data\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0"
Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.512035 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f079e6de-650d-45de-9ee0-bf7cc94c67b3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f079e6de-650d-45de-9ee0-bf7cc94c67b3\") " pod="openstack/nova-metadata-0"
Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.512692 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cs6bv\" (UniqueName: \"kubernetes.io/projected/f079e6de-650d-45de-9ee0-bf7cc94c67b3-kube-api-access-cs6bv\") pod \"nova-metadata-0\" (UID: \"f079e6de-650d-45de-9ee0-bf7cc94c67b3\") " pod="openstack/nova-metadata-0"
Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.520616 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtmft\" (UniqueName: \"kubernetes.io/projected/ca80895b-9518-45be-8896-06591e9356b2-kube-api-access-rtmft\") pod \"nova-api-0\" (UID: \"ca80895b-9518-45be-8896-06591e9356b2\") " pod="openstack/nova-api-0"
Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.562457 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.599311 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 26 11:29:23 crc kubenswrapper[4622]: W1126 11:29:23.992319 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf079e6de_650d_45de_9ee0_bf7cc94c67b3.slice/crio-e6838179671724bd91406e513680c544e12f363265958e2c5576176867df57cd WatchSource:0}: Error finding container e6838179671724bd91406e513680c544e12f363265958e2c5576176867df57cd: Status 404 returned error can't find the container with id e6838179671724bd91406e513680c544e12f363265958e2c5576176867df57cd
Nov 26 11:29:23 crc kubenswrapper[4622]: I1126 11:29:23.996815 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Nov 26 11:29:24 crc kubenswrapper[4622]: I1126 11:29:24.063086 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Nov 26 11:29:24 crc kubenswrapper[4622]: I1126 11:29:24.182968 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7","Type":"ContainerStarted","Data":"19c425a0c6adf3c80203a7339e0a1e50d8bbaabb916bdee88ccb608aea10a489"}
Nov 26 11:29:24 crc kubenswrapper[4622]: I1126 11:29:24.183604 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7","Type":"ContainerStarted","Data":"bf104765b66fe5efaa5af3f5f48a218fa12d0e6f97ae8e9de00373c942a735e4"}
Nov 26 11:29:24 crc kubenswrapper[4622]: I1126 11:29:24.188779 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f079e6de-650d-45de-9ee0-bf7cc94c67b3","Type":"ContainerStarted","Data":"d8b13e5b3683a0f457944852bd853c84034c0029faa80e5d1f79721398bbbf9f"}
Nov 26 11:29:24 crc kubenswrapper[4622]: I1126 11:29:24.188871 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f079e6de-650d-45de-9ee0-bf7cc94c67b3","Type":"ContainerStarted","Data":"e6838179671724bd91406e513680c544e12f363265958e2c5576176867df57cd"}
Nov 26 11:29:24 crc kubenswrapper[4622]: I1126 11:29:24.191579 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ca80895b-9518-45be-8896-06591e9356b2","Type":"ContainerStarted","Data":"604086bbb40953a927e403f1a5d2059ac864137938b846e1262e7209219919c4"}
Nov 26 11:29:24 crc kubenswrapper[4622]: I1126 11:29:24.209684 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.209659418 podStartE2EDuration="2.209659418s" podCreationTimestamp="2025-11-26 11:29:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:29:24.203732134 +0000 UTC m=+1123.794943656" watchObservedRunningTime="2025-11-26 11:29:24.209659418 +0000 UTC m=+1123.800870940"
Nov 26 11:29:24 crc kubenswrapper[4622]: I1126 11:29:24.731448 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2723fe0a-e74f-4934-8574-0b958246ee9f" path="/var/lib/kubelet/pods/2723fe0a-e74f-4934-8574-0b958246ee9f/volumes"
Nov 26 11:29:24 crc kubenswrapper[4622]: I1126 11:29:24.733018 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603" path="/var/lib/kubelet/pods/5ea5fee9-d7ff-45a7-ac5a-fa1b9da3f603/volumes"
Nov 26 11:29:25 crc kubenswrapper[4622]: I1126 11:29:25.204064 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ca80895b-9518-45be-8896-06591e9356b2","Type":"ContainerStarted","Data":"953210c406b4079864cae4c46ca86be5625220887f298300371075cdfcd26144"}
Nov 26 11:29:25 crc kubenswrapper[4622]: I1126 11:29:25.204476 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ca80895b-9518-45be-8896-06591e9356b2","Type":"ContainerStarted","Data":"6f018691493427ca271d99e68e4957f00986420a53b1783179e914a35e1e581d"}
Nov 26 11:29:25 crc kubenswrapper[4622]: I1126 11:29:25.205977 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f079e6de-650d-45de-9ee0-bf7cc94c67b3","Type":"ContainerStarted","Data":"6a6d87cca05de6b1cc6964c713187abe18410786dea35c760f6c97c3622c2ac3"}
Nov 26 11:29:25 crc kubenswrapper[4622]: I1126 11:29:25.266207 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.266187082 podStartE2EDuration="2.266187082s" podCreationTimestamp="2025-11-26 11:29:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:29:25.246933783 +0000 UTC m=+1124.838145306" watchObservedRunningTime="2025-11-26 11:29:25.266187082 +0000 UTC m=+1124.857398604"
Nov 26 11:29:25 crc kubenswrapper[4622]: I1126 11:29:25.268474 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.268459991 podStartE2EDuration="2.268459991s" podCreationTimestamp="2025-11-26 11:29:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:29:25.265684283 +0000 UTC m=+1124.856895805" watchObservedRunningTime="2025-11-26 11:29:25.268459991 +0000 UTC m=+1124.859671512"
Nov 26 11:29:27 crc kubenswrapper[4622]: I1126 11:29:27.522989 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Nov 26 11:29:28 crc kubenswrapper[4622]: I1126 11:29:28.562855 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Nov 26 11:29:28 crc kubenswrapper[4622]: I1126 11:29:28.562930 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Nov 26 11:29:32 crc kubenswrapper[4622]: I1126 11:29:32.523764 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Nov 26 11:29:32 crc kubenswrapper[4622]: I1126 11:29:32.550303 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Nov 26 11:29:33 crc kubenswrapper[4622]: I1126 11:29:33.306187 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Nov 26 11:29:33 crc kubenswrapper[4622]: I1126 11:29:33.563536 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Nov 26 11:29:33 crc kubenswrapper[4622]: I1126 11:29:33.563612 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Nov 26 11:29:33 crc kubenswrapper[4622]: I1126 11:29:33.600864 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Nov 26 11:29:33 crc kubenswrapper[4622]: I1126 11:29:33.600933 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Nov 26 11:29:34 crc kubenswrapper[4622]: I1126 11:29:34.577638 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="f079e6de-650d-45de-9ee0-bf7cc94c67b3" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Nov 26 11:29:34 crc kubenswrapper[4622]: I1126 11:29:34.577662 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="f079e6de-650d-45de-9ee0-bf7cc94c67b3" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Nov 26 11:29:34 crc kubenswrapper[4622]: I1126 11:29:34.611616 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ca80895b-9518-45be-8896-06591e9356b2" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.192:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Nov 26 11:29:34 crc kubenswrapper[4622]: I1126 11:29:34.611693 4622 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ca80895b-9518-45be-8896-06591e9356b2" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.192:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Nov 26 11:29:35 crc kubenswrapper[4622]: I1126 11:29:35.367991 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Nov 26 11:29:43 crc kubenswrapper[4622]: I1126 11:29:43.568993 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Nov 26 11:29:43 crc kubenswrapper[4622]: I1126 11:29:43.569529 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Nov 26 11:29:43 crc kubenswrapper[4622]: I1126 11:29:43.574557 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Nov 26 11:29:43 crc kubenswrapper[4622]: I1126 11:29:43.576368 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Nov 26 11:29:43 crc kubenswrapper[4622]: I1126 11:29:43.628966 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Nov 26 11:29:43 crc kubenswrapper[4622]: I1126 11:29:43.630295 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Nov 26 11:29:43 crc kubenswrapper[4622]: I1126 11:29:43.630437 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Nov 26 11:29:43 crc kubenswrapper[4622]: I1126 11:29:43.635204 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Nov 26 11:29:44 crc kubenswrapper[4622]: I1126 11:29:44.384459 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Nov 26 11:29:44 crc kubenswrapper[4622]: I1126 11:29:44.390699 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Nov 26 11:29:50 crc kubenswrapper[4622]: I1126 11:29:50.764283 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 26 11:29:51 crc kubenswrapper[4622]: I1126 11:29:51.561179 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 26 11:29:54 crc kubenswrapper[4622]: I1126 11:29:54.369605 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="38fbda9e-5203-4941-829d-1309dcf835e9" containerName="rabbitmq" containerID="cri-o://a8b7ed8165d741ca74012d4ee2b84f6c0cfb640926e04d1fedef8bbf4bb5145f" gracePeriod=604797
Nov 26 11:29:55 crc kubenswrapper[4622]: I1126 11:29:55.156570 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="35d9b078-6e67-49d3-a82d-c0b0bc289904" containerName="rabbitmq" containerID="cri-o://536c6d3ee68590a3ee610d404c45fa1623d311b72c6455e3c5300793331111de" gracePeriod=604797
Nov 26 11:29:57 crc kubenswrapper[4622]: I1126 11:29:57.129997 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="38fbda9e-5203-4941-829d-1309dcf835e9" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.94:5671: connect: connection refused"
Nov 26 11:29:57 crc kubenswrapper[4622]: I1126 11:29:57.411062 4622 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="35d9b078-6e67-49d3-a82d-c0b0bc289904" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.95:5671: connect: connection refused"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.133364 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk"]
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.140922 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.142602 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.143056 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.143871 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk"]
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.189957 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-secret-volume\") pod \"collect-profiles-29402610-x9qqk\" (UID: \"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.190004 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-config-volume\") pod \"collect-profiles-29402610-x9qqk\" (UID: \"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.190357 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtg95\" (UniqueName: \"kubernetes.io/projected/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-kube-api-access-jtg95\") pod \"collect-profiles-29402610-x9qqk\" (UID: \"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.293039 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtg95\" (UniqueName: \"kubernetes.io/projected/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-kube-api-access-jtg95\") pod \"collect-profiles-29402610-x9qqk\" (UID: \"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.293136 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-secret-volume\") pod \"collect-profiles-29402610-x9qqk\" (UID: \"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.293173 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-config-volume\") pod \"collect-profiles-29402610-x9qqk\" (UID: \"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.294170 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-config-volume\") pod \"collect-profiles-29402610-x9qqk\" (UID: \"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.298521 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-secret-volume\") pod \"collect-profiles-29402610-x9qqk\" (UID: \"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.308066 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtg95\" (UniqueName: \"kubernetes.io/projected/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-kube-api-access-jtg95\") pod \"collect-profiles-29402610-x9qqk\" (UID: \"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.463689 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.529719 4622 generic.go:334] "Generic (PLEG): container finished" podID="38fbda9e-5203-4941-829d-1309dcf835e9" containerID="a8b7ed8165d741ca74012d4ee2b84f6c0cfb640926e04d1fedef8bbf4bb5145f" exitCode=0
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.529763 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"38fbda9e-5203-4941-829d-1309dcf835e9","Type":"ContainerDied","Data":"a8b7ed8165d741ca74012d4ee2b84f6c0cfb640926e04d1fedef8bbf4bb5145f"}
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.790596 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.803033 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-config-data\") pod \"38fbda9e-5203-4941-829d-1309dcf835e9\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") "
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.803122 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"38fbda9e-5203-4941-829d-1309dcf835e9\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") "
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.803194 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-confd\") pod \"38fbda9e-5203-4941-829d-1309dcf835e9\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") "
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.803274 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/38fbda9e-5203-4941-829d-1309dcf835e9-erlang-cookie-secret\") pod \"38fbda9e-5203-4941-829d-1309dcf835e9\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") "
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.803366 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-plugins\") pod \"38fbda9e-5203-4941-829d-1309dcf835e9\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") "
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.803411 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-tls\") pod \"38fbda9e-5203-4941-829d-1309dcf835e9\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") "
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.803518 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h65sw\" (UniqueName: \"kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-kube-api-access-h65sw\") pod \"38fbda9e-5203-4941-829d-1309dcf835e9\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") "
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.803618 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-server-conf\") pod \"38fbda9e-5203-4941-829d-1309dcf835e9\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") "
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.803729 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/38fbda9e-5203-4941-829d-1309dcf835e9-pod-info\") pod \"38fbda9e-5203-4941-829d-1309dcf835e9\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") "
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.803790 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-plugins-conf\") pod \"38fbda9e-5203-4941-829d-1309dcf835e9\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") "
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.803864 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-erlang-cookie\") pod \"38fbda9e-5203-4941-829d-1309dcf835e9\" (UID: \"38fbda9e-5203-4941-829d-1309dcf835e9\") "
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.804089 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "38fbda9e-5203-4941-829d-1309dcf835e9" (UID: "38fbda9e-5203-4941-829d-1309dcf835e9"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.805151 4622 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.805348 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "38fbda9e-5203-4941-829d-1309dcf835e9" (UID: "38fbda9e-5203-4941-829d-1309dcf835e9"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.805950 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "38fbda9e-5203-4941-829d-1309dcf835e9" (UID: "38fbda9e-5203-4941-829d-1309dcf835e9"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.808280 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "38fbda9e-5203-4941-829d-1309dcf835e9" (UID: "38fbda9e-5203-4941-829d-1309dcf835e9"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.809367 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-kube-api-access-h65sw" (OuterVolumeSpecName: "kube-api-access-h65sw") pod "38fbda9e-5203-4941-829d-1309dcf835e9" (UID: "38fbda9e-5203-4941-829d-1309dcf835e9"). InnerVolumeSpecName "kube-api-access-h65sw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.809834 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38fbda9e-5203-4941-829d-1309dcf835e9-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "38fbda9e-5203-4941-829d-1309dcf835e9" (UID: "38fbda9e-5203-4941-829d-1309dcf835e9"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.823276 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/38fbda9e-5203-4941-829d-1309dcf835e9-pod-info" (OuterVolumeSpecName: "pod-info") pod "38fbda9e-5203-4941-829d-1309dcf835e9" (UID: "38fbda9e-5203-4941-829d-1309dcf835e9"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.823453 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "persistence") pod "38fbda9e-5203-4941-829d-1309dcf835e9" (UID: "38fbda9e-5203-4941-829d-1309dcf835e9"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.848078 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-config-data" (OuterVolumeSpecName: "config-data") pod "38fbda9e-5203-4941-829d-1309dcf835e9" (UID: "38fbda9e-5203-4941-829d-1309dcf835e9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.857892 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-server-conf" (OuterVolumeSpecName: "server-conf") pod "38fbda9e-5203-4941-829d-1309dcf835e9" (UID: "38fbda9e-5203-4941-829d-1309dcf835e9"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.906730 4622 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/38fbda9e-5203-4941-829d-1309dcf835e9-pod-info\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.906758 4622 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-plugins-conf\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.906768 4622 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.906777 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-config-data\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.906807 4622 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" "
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.906816 4622 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/38fbda9e-5203-4941-829d-1309dcf835e9-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.906826 4622 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.906835 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h65sw\" (UniqueName: \"kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-kube-api-access-h65sw\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.906843 4622 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/38fbda9e-5203-4941-829d-1309dcf835e9-server-conf\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.926798 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "38fbda9e-5203-4941-829d-1309dcf835e9" (UID: "38fbda9e-5203-4941-829d-1309dcf835e9"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.932092 4622 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc"
Nov 26 11:30:00 crc kubenswrapper[4622]: I1126 11:30:00.934183 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk"]
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.016964 4622 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.017288 4622 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/38fbda9e-5203-4941-829d-1309dcf835e9-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.540997 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"38fbda9e-5203-4941-829d-1309dcf835e9","Type":"ContainerDied","Data":"258458f19cadb76c3baa8e12bf24bb570c4feb6801ef0f531fee545774a2c45c"}
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.541312 4622 scope.go:117] "RemoveContainer" containerID="a8b7ed8165d741ca74012d4ee2b84f6c0cfb640926e04d1fedef8bbf4bb5145f"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.541057 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.542934 4622 generic.go:334] "Generic (PLEG): container finished" podID="fd2c94b0-fd61-4017-b33a-0e3c46b1f49e" containerID="0272a4f9c89a93727c43df153c31c423fba989ddb52e5b7e4508185edda667cd" exitCode=0
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.542978 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk" event={"ID":"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e","Type":"ContainerDied","Data":"0272a4f9c89a93727c43df153c31c423fba989ddb52e5b7e4508185edda667cd"}
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.542995 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk" event={"ID":"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e","Type":"ContainerStarted","Data":"966e0db102190b894829f096a5912caed43c9083e1cb95d67b6f337d88f9d2e5"}
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.547028 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"35d9b078-6e67-49d3-a82d-c0b0bc289904","Type":"ContainerDied","Data":"536c6d3ee68590a3ee610d404c45fa1623d311b72c6455e3c5300793331111de"}
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.547081 4622 generic.go:334] "Generic (PLEG): container finished" podID="35d9b078-6e67-49d3-a82d-c0b0bc289904" containerID="536c6d3ee68590a3ee610d404c45fa1623d311b72c6455e3c5300793331111de" exitCode=0
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.547136 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"35d9b078-6e67-49d3-a82d-c0b0bc289904","Type":"ContainerDied","Data":"537302411caf0095aac666ffd35217cbc164e6a161750f2fe19319ad0662982e"}
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.547166 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="537302411caf0095aac666ffd35217cbc164e6a161750f2fe19319ad0662982e"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.555185 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.573867 4622 scope.go:117] "RemoveContainer" containerID="5deadae99260bef731001a5f824300cc52b829eee6bb202d2644be4131ea1279"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.605050 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.614068 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.635352 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 26 11:30:01 crc kubenswrapper[4622]: E1126 11:30:01.635788 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35d9b078-6e67-49d3-a82d-c0b0bc289904" containerName="rabbitmq"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.635807 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="35d9b078-6e67-49d3-a82d-c0b0bc289904" containerName="rabbitmq"
Nov 26 11:30:01 crc kubenswrapper[4622]: E1126 11:30:01.635824 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38fbda9e-5203-4941-829d-1309dcf835e9" containerName="setup-container"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.635830 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="38fbda9e-5203-4941-829d-1309dcf835e9" containerName="setup-container"
Nov 26 11:30:01 crc kubenswrapper[4622]: E1126 11:30:01.635847 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35d9b078-6e67-49d3-a82d-c0b0bc289904" containerName="setup-container"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.635852 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="35d9b078-6e67-49d3-a82d-c0b0bc289904" containerName="setup-container"
Nov 26 11:30:01 crc kubenswrapper[4622]: E1126 11:30:01.635861 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38fbda9e-5203-4941-829d-1309dcf835e9" containerName="rabbitmq"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.635867 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="38fbda9e-5203-4941-829d-1309dcf835e9" containerName="rabbitmq"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.636051 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="38fbda9e-5203-4941-829d-1309dcf835e9" containerName="rabbitmq"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.636072 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="35d9b078-6e67-49d3-a82d-c0b0bc289904" containerName="rabbitmq"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.637093 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.641755 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.642050 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-plugins\") pod \"35d9b078-6e67-49d3-a82d-c0b0bc289904\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") "
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.642095 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"35d9b078-6e67-49d3-a82d-c0b0bc289904\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") "
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.642179 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-server-conf\") pod \"35d9b078-6e67-49d3-a82d-c0b0bc289904\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") "
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.642199 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/35d9b078-6e67-49d3-a82d-c0b0bc289904-erlang-cookie-secret\") pod \"35d9b078-6e67-49d3-a82d-c0b0bc289904\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") "
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.642231 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6jzh\" (UniqueName: \"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-kube-api-access-z6jzh\") pod \"35d9b078-6e67-49d3-a82d-c0b0bc289904\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") "
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.642265 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-confd\") pod \"35d9b078-6e67-49d3-a82d-c0b0bc289904\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") "
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.642284 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-erlang-cookie\") pod \"35d9b078-6e67-49d3-a82d-c0b0bc289904\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") "
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.642320 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-tls\") pod \"35d9b078-6e67-49d3-a82d-c0b0bc289904\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") "
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.642361 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/35d9b078-6e67-49d3-a82d-c0b0bc289904-pod-info\") pod \"35d9b078-6e67-49d3-a82d-c0b0bc289904\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") "
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.642396 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-plugins-conf\") pod \"35d9b078-6e67-49d3-a82d-c0b0bc289904\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") "
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.642419 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-config-data\") pod \"35d9b078-6e67-49d3-a82d-c0b0bc289904\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") "
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.648733 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "35d9b078-6e67-49d3-a82d-c0b0bc289904" (UID: "35d9b078-6e67-49d3-a82d-c0b0bc289904"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.649691 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "35d9b078-6e67-49d3-a82d-c0b0bc289904" (UID: "35d9b078-6e67-49d3-a82d-c0b0bc289904"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.650017 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.650643 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-kube-api-access-z6jzh" (OuterVolumeSpecName: "kube-api-access-z6jzh") pod "35d9b078-6e67-49d3-a82d-c0b0bc289904" (UID: "35d9b078-6e67-49d3-a82d-c0b0bc289904"). InnerVolumeSpecName "kube-api-access-z6jzh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.650773 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.651023 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.651155 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-z25df"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.651287 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.651400 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.654908 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "35d9b078-6e67-49d3-a82d-c0b0bc289904" (UID: "35d9b078-6e67-49d3-a82d-c0b0bc289904"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.655552 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "35d9b078-6e67-49d3-a82d-c0b0bc289904" (UID: "35d9b078-6e67-49d3-a82d-c0b0bc289904"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.660487 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.665107 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35d9b078-6e67-49d3-a82d-c0b0bc289904-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "35d9b078-6e67-49d3-a82d-c0b0bc289904" (UID: "35d9b078-6e67-49d3-a82d-c0b0bc289904"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.667562 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/35d9b078-6e67-49d3-a82d-c0b0bc289904-pod-info" (OuterVolumeSpecName: "pod-info") pod "35d9b078-6e67-49d3-a82d-c0b0bc289904" (UID: "35d9b078-6e67-49d3-a82d-c0b0bc289904"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.671780 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "35d9b078-6e67-49d3-a82d-c0b0bc289904" (UID: "35d9b078-6e67-49d3-a82d-c0b0bc289904"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.681680 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-config-data" (OuterVolumeSpecName: "config-data") pod "35d9b078-6e67-49d3-a82d-c0b0bc289904" (UID: "35d9b078-6e67-49d3-a82d-c0b0bc289904"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.719003 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-server-conf" (OuterVolumeSpecName: "server-conf") pod "35d9b078-6e67-49d3-a82d-c0b0bc289904" (UID: "35d9b078-6e67-49d3-a82d-c0b0bc289904"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.744696 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "35d9b078-6e67-49d3-a82d-c0b0bc289904" (UID: "35d9b078-6e67-49d3-a82d-c0b0bc289904"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.745367 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-confd\") pod \"35d9b078-6e67-49d3-a82d-c0b0bc289904\" (UID: \"35d9b078-6e67-49d3-a82d-c0b0bc289904\") "
Nov 26 11:30:01 crc kubenswrapper[4622]: W1126 11:30:01.745524 4622 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/35d9b078-6e67-49d3-a82d-c0b0bc289904/volumes/kubernetes.io~projected/rabbitmq-confd
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.745559 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "35d9b078-6e67-49d3-a82d-c0b0bc289904" (UID: "35d9b078-6e67-49d3-a82d-c0b0bc289904"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.745849 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/556a2699-b555-4ae3-8aa0-8c545af95e25-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.745880 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/556a2699-b555-4ae3-8aa0-8c545af95e25-server-conf\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.745913 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/556a2699-b555-4ae3-8aa0-8c545af95e25-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.745941 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/556a2699-b555-4ae3-8aa0-8c545af95e25-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.745990 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/556a2699-b555-4ae3-8aa0-8c545af95e25-pod-info\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746018 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/556a2699-b555-4ae3-8aa0-8c545af95e25-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746204 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746316 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/556a2699-b555-4ae3-8aa0-8c545af95e25-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746415 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/556a2699-b555-4ae3-8aa0-8c545af95e25-config-data\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746457 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9g2jn\" (UniqueName: \"kubernetes.io/projected/556a2699-b555-4ae3-8aa0-8c545af95e25-kube-api-access-9g2jn\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746486 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/556a2699-b555-4ae3-8aa0-8c545af95e25-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746859 4622 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746912 4622 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" "
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746929 4622 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-server-conf\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746939 4622 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/35d9b078-6e67-49d3-a82d-c0b0bc289904-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746949 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6jzh\" (UniqueName: \"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-kube-api-access-z6jzh\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746961 4622 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746969 4622 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746981 4622 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/35d9b078-6e67-49d3-a82d-c0b0bc289904-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746989 4622 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/35d9b078-6e67-49d3-a82d-c0b0bc289904-pod-info\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.746999 4622 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-plugins-conf\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.747006 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35d9b078-6e67-49d3-a82d-c0b0bc289904-config-data\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.772077 4622 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.849227 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.849302 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/556a2699-b555-4ae3-8aa0-8c545af95e25-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.849350 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/556a2699-b555-4ae3-8aa0-8c545af95e25-config-data\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.849371 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9g2jn\" (UniqueName: \"kubernetes.io/projected/556a2699-b555-4ae3-8aa0-8c545af95e25-kube-api-access-9g2jn\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.849396 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/556a2699-b555-4ae3-8aa0-8c545af95e25-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.849454 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/556a2699-b555-4ae3-8aa0-8c545af95e25-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.849475 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/556a2699-b555-4ae3-8aa0-8c545af95e25-server-conf\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.849525 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/556a2699-b555-4ae3-8aa0-8c545af95e25-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.849536 4622 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.849555 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/556a2699-b555-4ae3-8aa0-8c545af95e25-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.849581 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/556a2699-b555-4ae3-8aa0-8c545af95e25-pod-info\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.849612 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/556a2699-b555-4ae3-8aa0-8c545af95e25-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.849662 4622 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\""
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.850358 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/556a2699-b555-4ae3-8aa0-8c545af95e25-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.850430 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/556a2699-b555-4ae3-8aa0-8c545af95e25-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.850925 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/556a2699-b555-4ae3-8aa0-8c545af95e25-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.850965 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/556a2699-b555-4ae3-8aa0-8c545af95e25-config-data\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.851345 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/556a2699-b555-4ae3-8aa0-8c545af95e25-server-conf\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.853692 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/556a2699-b555-4ae3-8aa0-8c545af95e25-pod-info\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.853977 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/556a2699-b555-4ae3-8aa0-8c545af95e25-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.854067 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/556a2699-b555-4ae3-8aa0-8c545af95e25-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.854179 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/556a2699-b555-4ae3-8aa0-8c545af95e25-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.870608 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9g2jn\" (UniqueName: \"kubernetes.io/projected/556a2699-b555-4ae3-8aa0-8c545af95e25-kube-api-access-9g2jn\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.875750 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"556a2699-b555-4ae3-8aa0-8c545af95e25\") " pod="openstack/rabbitmq-server-0"
Nov 26 11:30:01 crc kubenswrapper[4622]: I1126 11:30:01.989236 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.395492 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.562522 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"556a2699-b555-4ae3-8aa0-8c545af95e25","Type":"ContainerStarted","Data":"0ee3ec162da66e6d5d66efbbbbb87c86274d9fb4615efe86e7c57606ffbaeb84"}
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.565040 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.607110 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.618267 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.624097 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.625450 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.631777 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.631794 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.631820 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.631898 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.631933 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.631907 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.632082 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-zf6th"
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.635776 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.668869 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/28c34d7a-7439-4282-9ca3-c6f19cc47dda-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.669134 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/28c34d7a-7439-4282-9ca3-c6f19cc47dda-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 26 11:30:02 crc
kubenswrapper[4622]: I1126 11:30:02.669175 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/28c34d7a-7439-4282-9ca3-c6f19cc47dda-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.669203 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.669223 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5n85x\" (UniqueName: \"kubernetes.io/projected/28c34d7a-7439-4282-9ca3-c6f19cc47dda-kube-api-access-5n85x\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.669290 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/28c34d7a-7439-4282-9ca3-c6f19cc47dda-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.669312 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/28c34d7a-7439-4282-9ca3-c6f19cc47dda-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.669328 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/28c34d7a-7439-4282-9ca3-c6f19cc47dda-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.669370 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/28c34d7a-7439-4282-9ca3-c6f19cc47dda-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.669419 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/28c34d7a-7439-4282-9ca3-c6f19cc47dda-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.669452 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/28c34d7a-7439-4282-9ca3-c6f19cc47dda-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 
11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.719648 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35d9b078-6e67-49d3-a82d-c0b0bc289904" path="/var/lib/kubelet/pods/35d9b078-6e67-49d3-a82d-c0b0bc289904/volumes" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.722851 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38fbda9e-5203-4941-829d-1309dcf835e9" path="/var/lib/kubelet/pods/38fbda9e-5203-4941-829d-1309dcf835e9/volumes" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.770796 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/28c34d7a-7439-4282-9ca3-c6f19cc47dda-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.770871 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/28c34d7a-7439-4282-9ca3-c6f19cc47dda-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.770893 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/28c34d7a-7439-4282-9ca3-c6f19cc47dda-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.770931 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/28c34d7a-7439-4282-9ca3-c6f19cc47dda-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.770965 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/28c34d7a-7439-4282-9ca3-c6f19cc47dda-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.770992 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/28c34d7a-7439-4282-9ca3-c6f19cc47dda-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.771149 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/28c34d7a-7439-4282-9ca3-c6f19cc47dda-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.771178 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/28c34d7a-7439-4282-9ca3-c6f19cc47dda-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc 
kubenswrapper[4622]: I1126 11:30:02.771220 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/28c34d7a-7439-4282-9ca3-c6f19cc47dda-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.771264 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.771303 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5n85x\" (UniqueName: \"kubernetes.io/projected/28c34d7a-7439-4282-9ca3-c6f19cc47dda-kube-api-access-5n85x\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.771665 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/28c34d7a-7439-4282-9ca3-c6f19cc47dda-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.772177 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/28c34d7a-7439-4282-9ca3-c6f19cc47dda-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.772236 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/28c34d7a-7439-4282-9ca3-c6f19cc47dda-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.772692 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/28c34d7a-7439-4282-9ca3-c6f19cc47dda-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.772860 4622 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.773528 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/28c34d7a-7439-4282-9ca3-c6f19cc47dda-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.776975 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/28c34d7a-7439-4282-9ca3-c6f19cc47dda-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.777238 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/28c34d7a-7439-4282-9ca3-c6f19cc47dda-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.778296 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/28c34d7a-7439-4282-9ca3-c6f19cc47dda-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.780038 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/28c34d7a-7439-4282-9ca3-c6f19cc47dda-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.789481 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5n85x\" (UniqueName: \"kubernetes.io/projected/28c34d7a-7439-4282-9ca3-c6f19cc47dda-kube-api-access-5n85x\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.800109 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"28c34d7a-7439-4282-9ca3-c6f19cc47dda\") " pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.816462 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.872485 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-config-volume\") pod \"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e\" (UID: \"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e\") " Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.872600 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtg95\" (UniqueName: \"kubernetes.io/projected/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-kube-api-access-jtg95\") pod \"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e\" (UID: \"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e\") " Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.872686 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-secret-volume\") pod \"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e\" (UID: \"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e\") " Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.873055 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-config-volume" (OuterVolumeSpecName: "config-volume") pod "fd2c94b0-fd61-4017-b33a-0e3c46b1f49e" (UID: "fd2c94b0-fd61-4017-b33a-0e3c46b1f49e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.875649 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-kube-api-access-jtg95" (OuterVolumeSpecName: "kube-api-access-jtg95") pod "fd2c94b0-fd61-4017-b33a-0e3c46b1f49e" (UID: "fd2c94b0-fd61-4017-b33a-0e3c46b1f49e"). InnerVolumeSpecName "kube-api-access-jtg95". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.875663 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fd2c94b0-fd61-4017-b33a-0e3c46b1f49e" (UID: "fd2c94b0-fd61-4017-b33a-0e3c46b1f49e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.949198 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.974933 4622 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-config-volume\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.974971 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtg95\" (UniqueName: \"kubernetes.io/projected/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-kube-api-access-jtg95\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.974987 4622 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fd2c94b0-fd61-4017-b33a-0e3c46b1f49e-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.981048 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-568675b579-sgrgv"] Nov 26 11:30:02 crc kubenswrapper[4622]: E1126 11:30:02.984118 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd2c94b0-fd61-4017-b33a-0e3c46b1f49e" containerName="collect-profiles" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.984145 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd2c94b0-fd61-4017-b33a-0e3c46b1f49e" containerName="collect-profiles" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.984391 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd2c94b0-fd61-4017-b33a-0e3c46b1f49e" containerName="collect-profiles" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.985315 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:02 crc kubenswrapper[4622]: I1126 11:30:02.989761 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.017964 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-568675b579-sgrgv"] Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.076052 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-dns-svc\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.076357 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-config\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.076456 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-ovsdbserver-sb\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.076497 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-openstack-edpm-ipam\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.076539 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8rv4\" (UniqueName: \"kubernetes.io/projected/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-kube-api-access-k8rv4\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.076576 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-ovsdbserver-nb\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.179491 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-ovsdbserver-sb\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.179592 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-openstack-edpm-ipam\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.179613 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8rv4\" (UniqueName: \"kubernetes.io/projected/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-kube-api-access-k8rv4\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.179645 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-ovsdbserver-nb\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.179698 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-dns-svc\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.179783 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-config\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.181116 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-openstack-edpm-ipam\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.181488 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-ovsdbserver-nb\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.182246 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-dns-svc\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.182608 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-ovsdbserver-sb\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.182751 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-config\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.198061 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8rv4\" (UniqueName: \"kubernetes.io/projected/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-kube-api-access-k8rv4\") pod \"dnsmasq-dns-568675b579-sgrgv\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.306054 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.383946 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.582451 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"556a2699-b555-4ae3-8aa0-8c545af95e25","Type":"ContainerStarted","Data":"f1cdcfe9de86b31edd6ea0833b9fb76d670f3433f7e9bd62bc7b35b999500104"} Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.583590 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"28c34d7a-7439-4282-9ca3-c6f19cc47dda","Type":"ContainerStarted","Data":"dfcfd0fc222ec0d1517f6eed942e51160b89c461f9d24ae69ef18a4a4b96aa65"} Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.585897 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk" event={"ID":"fd2c94b0-fd61-4017-b33a-0e3c46b1f49e","Type":"ContainerDied","Data":"966e0db102190b894829f096a5912caed43c9083e1cb95d67b6f337d88f9d2e5"} Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.585941 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="966e0db102190b894829f096a5912caed43c9083e1cb95d67b6f337d88f9d2e5" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.586030 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402610-x9qqk" Nov 26 11:30:03 crc kubenswrapper[4622]: I1126 11:30:03.795015 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-568675b579-sgrgv"] Nov 26 11:30:03 crc kubenswrapper[4622]: W1126 11:30:03.797712 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ec7c690_1a2b_45e2_9f9f_f739fbfb1799.slice/crio-d4c8241bd4ecb66b8b54d43d4517a699e0e755b277742622297ce4aae45ff104 WatchSource:0}: Error finding container d4c8241bd4ecb66b8b54d43d4517a699e0e755b277742622297ce4aae45ff104: Status 404 returned error can't find the container with id d4c8241bd4ecb66b8b54d43d4517a699e0e755b277742622297ce4aae45ff104 Nov 26 11:30:04 crc kubenswrapper[4622]: I1126 11:30:04.600665 4622 generic.go:334] "Generic (PLEG): container finished" podID="1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" containerID="9e94dc13cd716c4f6d4c8cba09e0fcf66c743115e5a2bbdb15ab628544cb7462" exitCode=0 Nov 26 11:30:04 crc kubenswrapper[4622]: I1126 11:30:04.600743 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568675b579-sgrgv" event={"ID":"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799","Type":"ContainerDied","Data":"9e94dc13cd716c4f6d4c8cba09e0fcf66c743115e5a2bbdb15ab628544cb7462"} Nov 26 11:30:04 crc kubenswrapper[4622]: I1126 11:30:04.601203 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568675b579-sgrgv" event={"ID":"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799","Type":"ContainerStarted","Data":"d4c8241bd4ecb66b8b54d43d4517a699e0e755b277742622297ce4aae45ff104"} Nov 26 11:30:04 crc kubenswrapper[4622]: I1126 11:30:04.603965 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"28c34d7a-7439-4282-9ca3-c6f19cc47dda","Type":"ContainerStarted","Data":"3289c3d5a7d4d024c570df885883c818ab7d75806fe494199505f43937daf515"} Nov 26 11:30:05 crc 
kubenswrapper[4622]: I1126 11:30:05.616027 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568675b579-sgrgv" event={"ID":"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799","Type":"ContainerStarted","Data":"5e2b29ff8919c0578c8bb27d0ae0c0a7d3776a8542864ececa3ea1692846dc1e"} Nov 26 11:30:05 crc kubenswrapper[4622]: I1126 11:30:05.637796 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-568675b579-sgrgv" podStartSLOduration=3.63777697 podStartE2EDuration="3.63777697s" podCreationTimestamp="2025-11-26 11:30:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:30:05.631368949 +0000 UTC m=+1165.222580462" watchObservedRunningTime="2025-11-26 11:30:05.63777697 +0000 UTC m=+1165.228988492" Nov 26 11:30:06 crc kubenswrapper[4622]: I1126 11:30:06.638572 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.307782 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.354020 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c9b558957-lc98m"] Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.354290 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-c9b558957-lc98m" podUID="1527e673-93b6-4f9a-aa73-df2b8e1f4cdb" containerName="dnsmasq-dns" containerID="cri-o://26bd97411f6ed7055a6a61ab6fbbe3c8a8d0d3dc865dfde127e4f9fe1b5c2eea" gracePeriod=10 Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.494672 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6dc44c56c-g4nc4"] Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.496334 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.503848 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6dc44c56c-g4nc4"] Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.581171 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-config\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.581216 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-ovsdbserver-nb\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.581256 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-ovsdbserver-sb\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.581294 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-dns-svc\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.581316 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-openstack-edpm-ipam\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.581407 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99wdh\" (UniqueName: \"kubernetes.io/projected/76cda7d7-b0d1-4d08-93ae-70f43efd4438-kube-api-access-99wdh\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.683494 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-ovsdbserver-sb\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.683935 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-dns-svc\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.684000 4622 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-openstack-edpm-ipam\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.684179 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99wdh\" (UniqueName: \"kubernetes.io/projected/76cda7d7-b0d1-4d08-93ae-70f43efd4438-kube-api-access-99wdh\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.684283 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-ovsdbserver-sb\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.684698 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-config\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.684723 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-ovsdbserver-nb\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.684770 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-dns-svc\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.684988 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-openstack-edpm-ipam\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.685316 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-ovsdbserver-nb\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.685707 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-config\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.695681 4622 generic.go:334] "Generic (PLEG): container finished" podID="1527e673-93b6-4f9a-aa73-df2b8e1f4cdb" containerID="26bd97411f6ed7055a6a61ab6fbbe3c8a8d0d3dc865dfde127e4f9fe1b5c2eea" exitCode=0 
Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.695719 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c9b558957-lc98m" event={"ID":"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb","Type":"ContainerDied","Data":"26bd97411f6ed7055a6a61ab6fbbe3c8a8d0d3dc865dfde127e4f9fe1b5c2eea"} Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.712908 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99wdh\" (UniqueName: \"kubernetes.io/projected/76cda7d7-b0d1-4d08-93ae-70f43efd4438-kube-api-access-99wdh\") pod \"dnsmasq-dns-6dc44c56c-g4nc4\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.774760 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.786478 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-ovsdbserver-sb\") pod \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.786546 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-config\") pod \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.786827 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-dns-svc\") pod \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.786860 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-ovsdbserver-nb\") pod \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.786884 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xgjp\" (UniqueName: \"kubernetes.io/projected/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-kube-api-access-7xgjp\") pod \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\" (UID: \"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb\") " Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.795645 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-kube-api-access-7xgjp" (OuterVolumeSpecName: "kube-api-access-7xgjp") pod "1527e673-93b6-4f9a-aa73-df2b8e1f4cdb" (UID: "1527e673-93b6-4f9a-aa73-df2b8e1f4cdb"). InnerVolumeSpecName "kube-api-access-7xgjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.820239 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.826410 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-config" (OuterVolumeSpecName: "config") pod "1527e673-93b6-4f9a-aa73-df2b8e1f4cdb" (UID: "1527e673-93b6-4f9a-aa73-df2b8e1f4cdb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.837354 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1527e673-93b6-4f9a-aa73-df2b8e1f4cdb" (UID: "1527e673-93b6-4f9a-aa73-df2b8e1f4cdb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.837571 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1527e673-93b6-4f9a-aa73-df2b8e1f4cdb" (UID: "1527e673-93b6-4f9a-aa73-df2b8e1f4cdb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.844301 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1527e673-93b6-4f9a-aa73-df2b8e1f4cdb" (UID: "1527e673-93b6-4f9a-aa73-df2b8e1f4cdb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.890632 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.890765 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.890836 4622 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.890902 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:13 crc kubenswrapper[4622]: I1126 11:30:13.890953 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xgjp\" (UniqueName: \"kubernetes.io/projected/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb-kube-api-access-7xgjp\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:14 crc kubenswrapper[4622]: I1126 11:30:14.214636 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6dc44c56c-g4nc4"] Nov 26 11:30:14 crc kubenswrapper[4622]: I1126 11:30:14.718715 4622 generic.go:334] "Generic (PLEG): container finished" podID="76cda7d7-b0d1-4d08-93ae-70f43efd4438" containerID="9041ab97c5db6c23f0e5009bd84f4605ab1d81c8e9fec52851ec18f738956efe" exitCode=0 Nov 26 11:30:14 crc 
kubenswrapper[4622]: I1126 11:30:14.722941 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:30:14 crc kubenswrapper[4622]: I1126 11:30:14.731213 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" event={"ID":"76cda7d7-b0d1-4d08-93ae-70f43efd4438","Type":"ContainerDied","Data":"9041ab97c5db6c23f0e5009bd84f4605ab1d81c8e9fec52851ec18f738956efe"} Nov 26 11:30:14 crc kubenswrapper[4622]: I1126 11:30:14.731254 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" event={"ID":"76cda7d7-b0d1-4d08-93ae-70f43efd4438","Type":"ContainerStarted","Data":"7d84d8113976129c6b5ff8cef6c0b210431e03d6f63aaf2c0fb6fdd2516a30b5"} Nov 26 11:30:14 crc kubenswrapper[4622]: I1126 11:30:14.731266 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c9b558957-lc98m" event={"ID":"1527e673-93b6-4f9a-aa73-df2b8e1f4cdb","Type":"ContainerDied","Data":"e8d741ceb21b4aba3a67b66a993e5a874dbc176a63b450d52dd830c999d54a2b"} Nov 26 11:30:14 crc kubenswrapper[4622]: I1126 11:30:14.731288 4622 scope.go:117] "RemoveContainer" containerID="26bd97411f6ed7055a6a61ab6fbbe3c8a8d0d3dc865dfde127e4f9fe1b5c2eea" Nov 26 11:30:14 crc kubenswrapper[4622]: I1126 11:30:14.840827 4622 scope.go:117] "RemoveContainer" containerID="f5382a88da8456425d308915bd1014b7af35be6afeb261c548cead5553a5fa2b" Nov 26 11:30:15 crc kubenswrapper[4622]: I1126 11:30:15.734552 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" event={"ID":"76cda7d7-b0d1-4d08-93ae-70f43efd4438","Type":"ContainerStarted","Data":"ff6baf2ba317375cc6d57f59e05f1f26d10ce46de3298927d6a501040825bdc9"} Nov 26 11:30:15 crc kubenswrapper[4622]: I1126 11:30:15.734838 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:15 crc kubenswrapper[4622]: I1126 11:30:15.754591 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" podStartSLOduration=2.754573214 podStartE2EDuration="2.754573214s" podCreationTimestamp="2025-11-26 11:30:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:30:15.749959067 +0000 UTC m=+1175.341170590" watchObservedRunningTime="2025-11-26 11:30:15.754573214 +0000 UTC m=+1175.345784737" Nov 26 11:30:23 crc kubenswrapper[4622]: I1126 11:30:23.821596 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:30:23 crc kubenswrapper[4622]: I1126 11:30:23.867389 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-568675b579-sgrgv"] Nov 26 11:30:23 crc kubenswrapper[4622]: I1126 11:30:23.867664 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-568675b579-sgrgv" podUID="1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" containerName="dnsmasq-dns" containerID="cri-o://5e2b29ff8919c0578c8bb27d0ae0c0a7d3776a8542864ececa3ea1692846dc1e" gracePeriod=10 Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.255095 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.287354 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-dns-svc\") pod \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.287718 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8rv4\" (UniqueName: \"kubernetes.io/projected/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-kube-api-access-k8rv4\") pod \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.287814 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-ovsdbserver-sb\") pod \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.287929 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-openstack-edpm-ipam\") pod \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.287982 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-config\") pod \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.288005 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-ovsdbserver-nb\") pod \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.298154 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-kube-api-access-k8rv4" (OuterVolumeSpecName: "kube-api-access-k8rv4") pod "1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" (UID: "1ec7c690-1a2b-45e2-9f9f-f739fbfb1799"). InnerVolumeSpecName "kube-api-access-k8rv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.324881 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" (UID: "1ec7c690-1a2b-45e2-9f9f-f739fbfb1799"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.325542 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" (UID: "1ec7c690-1a2b-45e2-9f9f-f739fbfb1799"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:30:24 crc kubenswrapper[4622]: E1126 11:30:24.328135 4622 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-ovsdbserver-sb podName:1ec7c690-1a2b-45e2-9f9f-f739fbfb1799 nodeName:}" failed. No retries permitted until 2025-11-26 11:30:24.828111509 +0000 UTC m=+1184.419323032 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ovsdbserver-sb" (UniqueName: "kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-ovsdbserver-sb") pod "1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" (UID: "1ec7c690-1a2b-45e2-9f9f-f739fbfb1799") : error deleting /var/lib/kubelet/pods/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799/volume-subpaths: remove /var/lib/kubelet/pods/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799/volume-subpaths: no such file or directory Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.328255 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-config" (OuterVolumeSpecName: "config") pod "1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" (UID: "1ec7c690-1a2b-45e2-9f9f-f739fbfb1799"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.328316 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" (UID: "1ec7c690-1a2b-45e2-9f9f-f739fbfb1799"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.390901 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.390936 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.390950 4622 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.390959 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8rv4\" (UniqueName: \"kubernetes.io/projected/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-kube-api-access-k8rv4\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.390968 4622 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.811602 4622 generic.go:334] "Generic (PLEG): container finished" podID="1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" containerID="5e2b29ff8919c0578c8bb27d0ae0c0a7d3776a8542864ececa3ea1692846dc1e" exitCode=0 Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.811656 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568675b579-sgrgv" 
event={"ID":"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799","Type":"ContainerDied","Data":"5e2b29ff8919c0578c8bb27d0ae0c0a7d3776a8542864ececa3ea1692846dc1e"} Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.811692 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568675b579-sgrgv" event={"ID":"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799","Type":"ContainerDied","Data":"d4c8241bd4ecb66b8b54d43d4517a699e0e755b277742622297ce4aae45ff104"} Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.811710 4622 scope.go:117] "RemoveContainer" containerID="5e2b29ff8919c0578c8bb27d0ae0c0a7d3776a8542864ececa3ea1692846dc1e" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.811852 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-568675b579-sgrgv" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.831119 4622 scope.go:117] "RemoveContainer" containerID="9e94dc13cd716c4f6d4c8cba09e0fcf66c743115e5a2bbdb15ab628544cb7462" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.848291 4622 scope.go:117] "RemoveContainer" containerID="5e2b29ff8919c0578c8bb27d0ae0c0a7d3776a8542864ececa3ea1692846dc1e" Nov 26 11:30:24 crc kubenswrapper[4622]: E1126 11:30:24.849007 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e2b29ff8919c0578c8bb27d0ae0c0a7d3776a8542864ececa3ea1692846dc1e\": container with ID starting with 5e2b29ff8919c0578c8bb27d0ae0c0a7d3776a8542864ececa3ea1692846dc1e not found: ID does not exist" containerID="5e2b29ff8919c0578c8bb27d0ae0c0a7d3776a8542864ececa3ea1692846dc1e" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.849072 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e2b29ff8919c0578c8bb27d0ae0c0a7d3776a8542864ececa3ea1692846dc1e"} err="failed to get container status \"5e2b29ff8919c0578c8bb27d0ae0c0a7d3776a8542864ececa3ea1692846dc1e\": rpc error: code = NotFound desc = could not find container \"5e2b29ff8919c0578c8bb27d0ae0c0a7d3776a8542864ececa3ea1692846dc1e\": container with ID starting with 5e2b29ff8919c0578c8bb27d0ae0c0a7d3776a8542864ececa3ea1692846dc1e not found: ID does not exist" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.849105 4622 scope.go:117] "RemoveContainer" containerID="9e94dc13cd716c4f6d4c8cba09e0fcf66c743115e5a2bbdb15ab628544cb7462" Nov 26 11:30:24 crc kubenswrapper[4622]: E1126 11:30:24.849451 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e94dc13cd716c4f6d4c8cba09e0fcf66c743115e5a2bbdb15ab628544cb7462\": container with ID starting with 9e94dc13cd716c4f6d4c8cba09e0fcf66c743115e5a2bbdb15ab628544cb7462 not found: ID does not exist" containerID="9e94dc13cd716c4f6d4c8cba09e0fcf66c743115e5a2bbdb15ab628544cb7462" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.849529 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e94dc13cd716c4f6d4c8cba09e0fcf66c743115e5a2bbdb15ab628544cb7462"} err="failed to get container status \"9e94dc13cd716c4f6d4c8cba09e0fcf66c743115e5a2bbdb15ab628544cb7462\": rpc error: code = NotFound desc = could not find container \"9e94dc13cd716c4f6d4c8cba09e0fcf66c743115e5a2bbdb15ab628544cb7462\": container with ID starting with 9e94dc13cd716c4f6d4c8cba09e0fcf66c743115e5a2bbdb15ab628544cb7462 not found: ID does not exist" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.900859 4622 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-ovsdbserver-sb\") pod \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\" (UID: \"1ec7c690-1a2b-45e2-9f9f-f739fbfb1799\") " Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.901295 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" (UID: "1ec7c690-1a2b-45e2-9f9f-f739fbfb1799"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:30:24 crc kubenswrapper[4622]: I1126 11:30:24.902071 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:25 crc kubenswrapper[4622]: I1126 11:30:25.139960 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-568675b579-sgrgv"] Nov 26 11:30:25 crc kubenswrapper[4622]: I1126 11:30:25.147045 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-568675b579-sgrgv"] Nov 26 11:30:26 crc kubenswrapper[4622]: I1126 11:30:26.716091 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" path="/var/lib/kubelet/pods/1ec7c690-1a2b-45e2-9f9f-f739fbfb1799/volumes" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.472894 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g"] Nov 26 11:30:29 crc kubenswrapper[4622]: E1126 11:30:29.473819 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" containerName="init" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.473839 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" containerName="init" Nov 26 11:30:29 crc kubenswrapper[4622]: E1126 11:30:29.473853 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" containerName="dnsmasq-dns" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.473858 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" containerName="dnsmasq-dns" Nov 26 11:30:29 crc kubenswrapper[4622]: E1126 11:30:29.473882 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1527e673-93b6-4f9a-aa73-df2b8e1f4cdb" containerName="init" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.473888 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="1527e673-93b6-4f9a-aa73-df2b8e1f4cdb" containerName="init" Nov 26 11:30:29 crc kubenswrapper[4622]: E1126 11:30:29.473898 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1527e673-93b6-4f9a-aa73-df2b8e1f4cdb" containerName="dnsmasq-dns" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.473905 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="1527e673-93b6-4f9a-aa73-df2b8e1f4cdb" containerName="dnsmasq-dns" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.474129 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ec7c690-1a2b-45e2-9f9f-f739fbfb1799" containerName="dnsmasq-dns" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.474154 4622 
memory_manager.go:354] "RemoveStaleState removing state" podUID="1527e673-93b6-4f9a-aa73-df2b8e1f4cdb" containerName="dnsmasq-dns" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.475083 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.477750 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.477878 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.478110 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.478838 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.484680 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g"] Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.587472 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jx5n\" (UniqueName: \"kubernetes.io/projected/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-kube-api-access-6jx5n\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.587551 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.587623 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.587721 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.689525 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 
11:30:29.690100 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jx5n\" (UniqueName: \"kubernetes.io/projected/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-kube-api-access-6jx5n\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.690221 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.690347 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.694001 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.694718 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.695174 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.704007 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jx5n\" (UniqueName: \"kubernetes.io/projected/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-kube-api-access-6jx5n\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:29 crc kubenswrapper[4622]: I1126 11:30:29.798915 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:30 crc kubenswrapper[4622]: I1126 11:30:30.246204 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g"] Nov 26 11:30:30 crc kubenswrapper[4622]: W1126 11:30:30.246951 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c88ddbe_6a37_4a2c_b6ed_1aa193644050.slice/crio-6979c59e0b3d9894ca894f8f5a701df714f597c3446cc7ecc29c3ba7d49ece57 WatchSource:0}: Error finding container 6979c59e0b3d9894ca894f8f5a701df714f597c3446cc7ecc29c3ba7d49ece57: Status 404 returned error can't find the container with id 6979c59e0b3d9894ca894f8f5a701df714f597c3446cc7ecc29c3ba7d49ece57 Nov 26 11:30:30 crc kubenswrapper[4622]: I1126 11:30:30.249533 4622 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 26 11:30:30 crc kubenswrapper[4622]: I1126 11:30:30.864521 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" event={"ID":"3c88ddbe-6a37-4a2c-b6ed-1aa193644050","Type":"ContainerStarted","Data":"6979c59e0b3d9894ca894f8f5a701df714f597c3446cc7ecc29c3ba7d49ece57"} Nov 26 11:30:35 crc kubenswrapper[4622]: I1126 11:30:35.924438 4622 generic.go:334] "Generic (PLEG): container finished" podID="556a2699-b555-4ae3-8aa0-8c545af95e25" containerID="f1cdcfe9de86b31edd6ea0833b9fb76d670f3433f7e9bd62bc7b35b999500104" exitCode=0 Nov 26 11:30:35 crc kubenswrapper[4622]: I1126 11:30:35.924732 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"556a2699-b555-4ae3-8aa0-8c545af95e25","Type":"ContainerDied","Data":"f1cdcfe9de86b31edd6ea0833b9fb76d670f3433f7e9bd62bc7b35b999500104"} Nov 26 11:30:36 crc kubenswrapper[4622]: I1126 11:30:36.933805 4622 generic.go:334] "Generic (PLEG): container finished" podID="28c34d7a-7439-4282-9ca3-c6f19cc47dda" containerID="3289c3d5a7d4d024c570df885883c818ab7d75806fe494199505f43937daf515" exitCode=0 Nov 26 11:30:36 crc kubenswrapper[4622]: I1126 11:30:36.933853 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"28c34d7a-7439-4282-9ca3-c6f19cc47dda","Type":"ContainerDied","Data":"3289c3d5a7d4d024c570df885883c818ab7d75806fe494199505f43937daf515"} Nov 26 11:30:37 crc kubenswrapper[4622]: I1126 11:30:37.944066 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" event={"ID":"3c88ddbe-6a37-4a2c-b6ed-1aa193644050","Type":"ContainerStarted","Data":"4ad1d8661bdeb2025409de8b92c260de6db9221dff90506e0044c5aae6500a6a"} Nov 26 11:30:37 crc kubenswrapper[4622]: I1126 11:30:37.947243 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"556a2699-b555-4ae3-8aa0-8c545af95e25","Type":"ContainerStarted","Data":"e88e696ec569356161cfa188774c251ea9a2d14bb0a1ad2da63c37e2bee98294"} Nov 26 11:30:37 crc kubenswrapper[4622]: I1126 11:30:37.947521 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Nov 26 11:30:37 crc kubenswrapper[4622]: I1126 11:30:37.949857 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"28c34d7a-7439-4282-9ca3-c6f19cc47dda","Type":"ContainerStarted","Data":"a1c6305e883e9bc20ba2f60f2e98bbb0be9d289d0caa5681913b7a8ff53feae6"} Nov 26 11:30:37 crc kubenswrapper[4622]: I1126 11:30:37.950080 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:37 crc kubenswrapper[4622]: I1126 11:30:37.959913 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" podStartSLOduration=2.111817135 podStartE2EDuration="8.959889863s" podCreationTimestamp="2025-11-26 11:30:29 +0000 UTC" firstStartedPulling="2025-11-26 11:30:30.249300005 +0000 UTC m=+1189.840511526" lastFinishedPulling="2025-11-26 11:30:37.097372732 +0000 UTC m=+1196.688584254" observedRunningTime="2025-11-26 11:30:37.957338439 +0000 UTC m=+1197.548549961" watchObservedRunningTime="2025-11-26 11:30:37.959889863 +0000 UTC m=+1197.551101386" Nov 26 11:30:37 crc kubenswrapper[4622]: I1126 11:30:37.984118 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=35.984100007 podStartE2EDuration="35.984100007s" podCreationTimestamp="2025-11-26 11:30:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:30:37.978219 +0000 UTC m=+1197.569430522" watchObservedRunningTime="2025-11-26 11:30:37.984100007 +0000 UTC m=+1197.575311529" Nov 26 11:30:38 crc kubenswrapper[4622]: I1126 11:30:38.003315 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.0032958 podStartE2EDuration="37.0032958s" podCreationTimestamp="2025-11-26 11:30:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:30:37.995366839 +0000 UTC m=+1197.586578371" watchObservedRunningTime="2025-11-26 11:30:38.0032958 +0000 UTC m=+1197.594507322" Nov 26 11:30:44 crc kubenswrapper[4622]: I1126 11:30:44.824377 4622 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod1527e673-93b6-4f9a-aa73-df2b8e1f4cdb"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod1527e673-93b6-4f9a-aa73-df2b8e1f4cdb] : Timed out while waiting for systemd to remove kubepods-besteffort-pod1527e673_93b6_4f9a_aa73_df2b8e1f4cdb.slice" Nov 26 11:30:44 crc kubenswrapper[4622]: E1126 11:30:44.824982 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod1527e673-93b6-4f9a-aa73-df2b8e1f4cdb] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod1527e673-93b6-4f9a-aa73-df2b8e1f4cdb] : Timed out while waiting for systemd to remove kubepods-besteffort-pod1527e673_93b6_4f9a_aa73_df2b8e1f4cdb.slice" pod="openstack/dnsmasq-dns-c9b558957-lc98m" podUID="1527e673-93b6-4f9a-aa73-df2b8e1f4cdb" Nov 26 11:30:45 crc kubenswrapper[4622]: I1126 11:30:45.007176 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-c9b558957-lc98m" Nov 26 11:30:45 crc kubenswrapper[4622]: I1126 11:30:45.049061 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c9b558957-lc98m"] Nov 26 11:30:45 crc kubenswrapper[4622]: I1126 11:30:45.055844 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-c9b558957-lc98m"] Nov 26 11:30:46 crc kubenswrapper[4622]: I1126 11:30:46.714715 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1527e673-93b6-4f9a-aa73-df2b8e1f4cdb" path="/var/lib/kubelet/pods/1527e673-93b6-4f9a-aa73-df2b8e1f4cdb/volumes" Nov 26 11:30:49 crc kubenswrapper[4622]: I1126 11:30:49.036991 4622 generic.go:334] "Generic (PLEG): container finished" podID="3c88ddbe-6a37-4a2c-b6ed-1aa193644050" containerID="4ad1d8661bdeb2025409de8b92c260de6db9221dff90506e0044c5aae6500a6a" exitCode=0 Nov 26 11:30:49 crc kubenswrapper[4622]: I1126 11:30:49.037068 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" event={"ID":"3c88ddbe-6a37-4a2c-b6ed-1aa193644050","Type":"ContainerDied","Data":"4ad1d8661bdeb2025409de8b92c260de6db9221dff90506e0044c5aae6500a6a"} Nov 26 11:30:50 crc kubenswrapper[4622]: I1126 11:30:50.335377 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:50 crc kubenswrapper[4622]: I1126 11:30:50.402513 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jx5n\" (UniqueName: \"kubernetes.io/projected/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-kube-api-access-6jx5n\") pod \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " Nov 26 11:30:50 crc kubenswrapper[4622]: I1126 11:30:50.402582 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-ssh-key\") pod \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " Nov 26 11:30:50 crc kubenswrapper[4622]: I1126 11:30:50.402711 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-inventory\") pod \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " Nov 26 11:30:50 crc kubenswrapper[4622]: I1126 11:30:50.402782 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-repo-setup-combined-ca-bundle\") pod \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\" (UID: \"3c88ddbe-6a37-4a2c-b6ed-1aa193644050\") " Nov 26 11:30:50 crc kubenswrapper[4622]: I1126 11:30:50.407885 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-kube-api-access-6jx5n" (OuterVolumeSpecName: "kube-api-access-6jx5n") pod "3c88ddbe-6a37-4a2c-b6ed-1aa193644050" (UID: "3c88ddbe-6a37-4a2c-b6ed-1aa193644050"). InnerVolumeSpecName "kube-api-access-6jx5n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:30:50 crc kubenswrapper[4622]: I1126 11:30:50.408156 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "3c88ddbe-6a37-4a2c-b6ed-1aa193644050" (UID: "3c88ddbe-6a37-4a2c-b6ed-1aa193644050"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:30:50 crc kubenswrapper[4622]: I1126 11:30:50.423780 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-inventory" (OuterVolumeSpecName: "inventory") pod "3c88ddbe-6a37-4a2c-b6ed-1aa193644050" (UID: "3c88ddbe-6a37-4a2c-b6ed-1aa193644050"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:30:50 crc kubenswrapper[4622]: I1126 11:30:50.425106 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3c88ddbe-6a37-4a2c-b6ed-1aa193644050" (UID: "3c88ddbe-6a37-4a2c-b6ed-1aa193644050"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:30:50 crc kubenswrapper[4622]: I1126 11:30:50.504800 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:50 crc kubenswrapper[4622]: I1126 11:30:50.504831 4622 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:50 crc kubenswrapper[4622]: I1126 11:30:50.504843 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jx5n\" (UniqueName: \"kubernetes.io/projected/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-kube-api-access-6jx5n\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:50 crc kubenswrapper[4622]: I1126 11:30:50.504852 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c88ddbe-6a37-4a2c-b6ed-1aa193644050-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.052944 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" event={"ID":"3c88ddbe-6a37-4a2c-b6ed-1aa193644050","Type":"ContainerDied","Data":"6979c59e0b3d9894ca894f8f5a701df714f597c3446cc7ecc29c3ba7d49ece57"} Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.053179 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6979c59e0b3d9894ca894f8f5a701df714f597c3446cc7ecc29c3ba7d49ece57" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.052977 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.152181 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8"] Nov 26 11:30:51 crc kubenswrapper[4622]: E1126 11:30:51.152570 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c88ddbe-6a37-4a2c-b6ed-1aa193644050" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.152588 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c88ddbe-6a37-4a2c-b6ed-1aa193644050" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.152799 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c88ddbe-6a37-4a2c-b6ed-1aa193644050" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.153351 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.155034 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.155414 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.155918 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.161192 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.161297 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8"] Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.215638 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.215715 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghphw\" (UniqueName: \"kubernetes.io/projected/01028f5e-ea5c-415f-9c3b-bdf59f457db9-kube-api-access-ghphw\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.215803 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.216045 4622 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.317642 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.317704 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghphw\" (UniqueName: \"kubernetes.io/projected/01028f5e-ea5c-415f-9c3b-bdf59f457db9-kube-api-access-ghphw\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.317808 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.317855 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.322417 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.326459 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.327541 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.333467 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ghphw\" (UniqueName: \"kubernetes.io/projected/01028f5e-ea5c-415f-9c3b-bdf59f457db9-kube-api-access-ghphw\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.468447 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.912185 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8"] Nov 26 11:30:51 crc kubenswrapper[4622]: W1126 11:30:51.915106 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod01028f5e_ea5c_415f_9c3b_bdf59f457db9.slice/crio-5ca7b58d88118654d784fad144ba9abec19f815f3c9801a15906d14d83748019 WatchSource:0}: Error finding container 5ca7b58d88118654d784fad144ba9abec19f815f3c9801a15906d14d83748019: Status 404 returned error can't find the container with id 5ca7b58d88118654d784fad144ba9abec19f815f3c9801a15906d14d83748019 Nov 26 11:30:51 crc kubenswrapper[4622]: I1126 11:30:51.992752 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Nov 26 11:30:52 crc kubenswrapper[4622]: I1126 11:30:52.064600 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" event={"ID":"01028f5e-ea5c-415f-9c3b-bdf59f457db9","Type":"ContainerStarted","Data":"5ca7b58d88118654d784fad144ba9abec19f815f3c9801a15906d14d83748019"} Nov 26 11:30:52 crc kubenswrapper[4622]: I1126 11:30:52.953135 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Nov 26 11:30:53 crc kubenswrapper[4622]: I1126 11:30:53.071739 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" event={"ID":"01028f5e-ea5c-415f-9c3b-bdf59f457db9","Type":"ContainerStarted","Data":"7c216adaf12f7b584e3ca83221992512c654bc3ee77a2b4b46db329f7ef14cbd"} Nov 26 11:30:53 crc kubenswrapper[4622]: I1126 11:30:53.090455 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" podStartSLOduration=1.6010596700000002 podStartE2EDuration="2.090441399s" podCreationTimestamp="2025-11-26 11:30:51 +0000 UTC" firstStartedPulling="2025-11-26 11:30:51.917429998 +0000 UTC m=+1211.508641519" lastFinishedPulling="2025-11-26 11:30:52.406811726 +0000 UTC m=+1211.998023248" observedRunningTime="2025-11-26 11:30:53.086249098 +0000 UTC m=+1212.677460620" watchObservedRunningTime="2025-11-26 11:30:53.090441399 +0000 UTC m=+1212.681652921" Nov 26 11:31:15 crc kubenswrapper[4622]: I1126 11:31:15.199166 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:31:15 crc kubenswrapper[4622]: I1126 11:31:15.199829 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:31:42 crc kubenswrapper[4622]: I1126 11:31:42.749354 4622 scope.go:117] "RemoveContainer" containerID="041b7b43d0fdde5d09001c58a1e433023ac172b898a5c6b1424d9cc25714de94" Nov 26 11:31:45 crc kubenswrapper[4622]: I1126 11:31:45.198696 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:31:45 crc kubenswrapper[4622]: I1126 11:31:45.199461 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:32:15 crc kubenswrapper[4622]: I1126 11:32:15.198687 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:32:15 crc kubenswrapper[4622]: I1126 11:32:15.199366 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:32:15 crc kubenswrapper[4622]: I1126 11:32:15.199436 4622 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:32:15 crc kubenswrapper[4622]: I1126 11:32:15.200760 4622 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8b880948f29e69f7cc8340036aff51e00d5b43fc0f918a98ac809109e6a99119"} pod="openshift-machine-config-operator/machine-config-daemon-k565w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 26 11:32:15 crc kubenswrapper[4622]: I1126 11:32:15.200829 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" containerID="cri-o://8b880948f29e69f7cc8340036aff51e00d5b43fc0f918a98ac809109e6a99119" gracePeriod=600 Nov 26 11:32:15 crc kubenswrapper[4622]: I1126 11:32:15.792725 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerID="8b880948f29e69f7cc8340036aff51e00d5b43fc0f918a98ac809109e6a99119" exitCode=0 Nov 26 11:32:15 crc kubenswrapper[4622]: I1126 11:32:15.792786 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerDied","Data":"8b880948f29e69f7cc8340036aff51e00d5b43fc0f918a98ac809109e6a99119"} Nov 26 11:32:15 crc kubenswrapper[4622]: I1126 11:32:15.793078 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c"} Nov 26 11:32:15 crc kubenswrapper[4622]: I1126 11:32:15.793107 4622 scope.go:117] "RemoveContainer" containerID="c9f0579c71b4854739675c5824d6deebe369961efeefefef06d1fd31216bd3f4" Nov 26 11:32:42 crc kubenswrapper[4622]: I1126 11:32:42.791416 4622 scope.go:117] "RemoveContainer" containerID="357e8330b3343687d5c623f7c14074b87ceb3aff24ca54686d204844f68a0324" Nov 26 11:32:42 crc kubenswrapper[4622]: I1126 11:32:42.811990 4622 scope.go:117] "RemoveContainer" containerID="6a9788b70c533f22ff68cc7de8b27db1f9358b2b93f9f9f49b6381dc67dc804b" Nov 26 11:32:42 crc kubenswrapper[4622]: I1126 11:32:42.840772 4622 scope.go:117] "RemoveContainer" containerID="09f037d1efcc2af40b89921183d8b28f542b87830b3b558ff41f85815620e68b" Nov 26 11:32:42 crc kubenswrapper[4622]: I1126 11:32:42.883042 4622 scope.go:117] "RemoveContainer" containerID="536c6d3ee68590a3ee610d404c45fa1623d311b72c6455e3c5300793331111de" Nov 26 11:32:42 crc kubenswrapper[4622]: I1126 11:32:42.910584 4622 scope.go:117] "RemoveContainer" containerID="1f665d889e17bcc3173a9036e22e23d9ef982403ad36ef20072751070076b0ef" Nov 26 11:32:42 crc kubenswrapper[4622]: I1126 11:32:42.939853 4622 scope.go:117] "RemoveContainer" containerID="e68baed220f32ac667d7db1f9ad14c735c05432101aca2601759f038799f6635" Nov 26 11:32:42 crc kubenswrapper[4622]: I1126 11:32:42.964766 4622 scope.go:117] "RemoveContainer" containerID="52d726569743b69d2469fd106d8b307ba882c9c7a5598c07db3c7de049deffb4" Nov 26 11:34:11 crc kubenswrapper[4622]: I1126 11:34:11.781972 4622 generic.go:334] "Generic (PLEG): container finished" podID="01028f5e-ea5c-415f-9c3b-bdf59f457db9" containerID="7c216adaf12f7b584e3ca83221992512c654bc3ee77a2b4b46db329f7ef14cbd" exitCode=0 Nov 26 11:34:11 crc kubenswrapper[4622]: I1126 11:34:11.782051 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" event={"ID":"01028f5e-ea5c-415f-9c3b-bdf59f457db9","Type":"ContainerDied","Data":"7c216adaf12f7b584e3ca83221992512c654bc3ee77a2b4b46db329f7ef14cbd"} Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.114109 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.128933 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-bootstrap-combined-ca-bundle\") pod \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.128997 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghphw\" (UniqueName: \"kubernetes.io/projected/01028f5e-ea5c-415f-9c3b-bdf59f457db9-kube-api-access-ghphw\") pod \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.129127 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-ssh-key\") pod \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.129166 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-inventory\") pod \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\" (UID: \"01028f5e-ea5c-415f-9c3b-bdf59f457db9\") " Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.134579 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01028f5e-ea5c-415f-9c3b-bdf59f457db9-kube-api-access-ghphw" (OuterVolumeSpecName: "kube-api-access-ghphw") pod "01028f5e-ea5c-415f-9c3b-bdf59f457db9" (UID: "01028f5e-ea5c-415f-9c3b-bdf59f457db9"). InnerVolumeSpecName "kube-api-access-ghphw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.134692 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "01028f5e-ea5c-415f-9c3b-bdf59f457db9" (UID: "01028f5e-ea5c-415f-9c3b-bdf59f457db9"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.149954 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-inventory" (OuterVolumeSpecName: "inventory") pod "01028f5e-ea5c-415f-9c3b-bdf59f457db9" (UID: "01028f5e-ea5c-415f-9c3b-bdf59f457db9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.151967 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "01028f5e-ea5c-415f-9c3b-bdf59f457db9" (UID: "01028f5e-ea5c-415f-9c3b-bdf59f457db9"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.231190 4622 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.231216 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghphw\" (UniqueName: \"kubernetes.io/projected/01028f5e-ea5c-415f-9c3b-bdf59f457db9-kube-api-access-ghphw\") on node \"crc\" DevicePath \"\"" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.231227 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.231236 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/01028f5e-ea5c-415f-9c3b-bdf59f457db9-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.796388 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" event={"ID":"01028f5e-ea5c-415f-9c3b-bdf59f457db9","Type":"ContainerDied","Data":"5ca7b58d88118654d784fad144ba9abec19f815f3c9801a15906d14d83748019"} Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.796436 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.796465 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ca7b58d88118654d784fad144ba9abec19f815f3c9801a15906d14d83748019" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.865055 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p"] Nov 26 11:34:13 crc kubenswrapper[4622]: E1126 11:34:13.865875 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01028f5e-ea5c-415f-9c3b-bdf59f457db9" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.865902 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="01028f5e-ea5c-415f-9c3b-bdf59f457db9" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.866313 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="01028f5e-ea5c-415f-9c3b-bdf59f457db9" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.867546 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.870346 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.870439 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.870786 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.873776 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.879901 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p"] Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.948956 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p\" (UID: \"5ec8f07f-cde3-45c1-a9b1-2d21751b3026\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.949454 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p\" (UID: \"5ec8f07f-cde3-45c1-a9b1-2d21751b3026\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" Nov 26 11:34:13 crc kubenswrapper[4622]: I1126 11:34:13.949998 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrqks\" (UniqueName: \"kubernetes.io/projected/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-kube-api-access-jrqks\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p\" (UID: \"5ec8f07f-cde3-45c1-a9b1-2d21751b3026\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" Nov 26 11:34:14 crc kubenswrapper[4622]: I1126 11:34:14.053092 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrqks\" (UniqueName: \"kubernetes.io/projected/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-kube-api-access-jrqks\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p\" (UID: \"5ec8f07f-cde3-45c1-a9b1-2d21751b3026\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" Nov 26 11:34:14 crc kubenswrapper[4622]: I1126 11:34:14.053311 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p\" (UID: \"5ec8f07f-cde3-45c1-a9b1-2d21751b3026\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" Nov 26 11:34:14 crc kubenswrapper[4622]: I1126 11:34:14.053494 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-ssh-key\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p\" (UID: \"5ec8f07f-cde3-45c1-a9b1-2d21751b3026\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" Nov 26 11:34:14 crc kubenswrapper[4622]: I1126 11:34:14.058217 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p\" (UID: \"5ec8f07f-cde3-45c1-a9b1-2d21751b3026\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" Nov 26 11:34:14 crc kubenswrapper[4622]: I1126 11:34:14.058300 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p\" (UID: \"5ec8f07f-cde3-45c1-a9b1-2d21751b3026\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" Nov 26 11:34:14 crc kubenswrapper[4622]: I1126 11:34:14.070447 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrqks\" (UniqueName: \"kubernetes.io/projected/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-kube-api-access-jrqks\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p\" (UID: \"5ec8f07f-cde3-45c1-a9b1-2d21751b3026\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" Nov 26 11:34:14 crc kubenswrapper[4622]: I1126 11:34:14.186549 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" Nov 26 11:34:14 crc kubenswrapper[4622]: I1126 11:34:14.661630 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p"] Nov 26 11:34:14 crc kubenswrapper[4622]: I1126 11:34:14.808249 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" event={"ID":"5ec8f07f-cde3-45c1-a9b1-2d21751b3026","Type":"ContainerStarted","Data":"004ca29bb80b484b30db4bc930a0693b24cd926d72822b3daa799a4030a818a9"} Nov 26 11:34:15 crc kubenswrapper[4622]: I1126 11:34:15.199708 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:34:15 crc kubenswrapper[4622]: I1126 11:34:15.199778 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:34:15 crc kubenswrapper[4622]: I1126 11:34:15.815679 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" event={"ID":"5ec8f07f-cde3-45c1-a9b1-2d21751b3026","Type":"ContainerStarted","Data":"071aaf0f86fbc29ef8b509e2f6e7386a0be606fa6768f3925f10a41f3ac8223f"} Nov 26 11:34:15 crc kubenswrapper[4622]: I1126 11:34:15.831134 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" podStartSLOduration=2.161087691 podStartE2EDuration="2.831120804s" podCreationTimestamp="2025-11-26 11:34:13 +0000 UTC" firstStartedPulling="2025-11-26 11:34:14.671053801 +0000 UTC m=+1414.262265323" lastFinishedPulling="2025-11-26 11:34:15.341086913 +0000 UTC m=+1414.932298436" observedRunningTime="2025-11-26 11:34:15.826484316 +0000 UTC m=+1415.417695837" watchObservedRunningTime="2025-11-26 11:34:15.831120804 +0000 UTC m=+1415.422332326" Nov 26 11:34:43 crc kubenswrapper[4622]: I1126 11:34:43.098936 4622 scope.go:117] "RemoveContainer" containerID="35e7aa4109463f397bed20869781e2958b160f7927640379ce4a2e49129deca7" Nov 26 11:34:43 crc kubenswrapper[4622]: I1126 11:34:43.128527 4622 scope.go:117] "RemoveContainer" containerID="cc1b0d51a08e90923d14e8fd416207a1bd85e357b11d03489b66d7f6b9da6ac8" Nov 26 11:34:45 crc kubenswrapper[4622]: I1126 11:34:45.198668 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:34:45 crc kubenswrapper[4622]: I1126 11:34:45.199270 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:35:15 crc kubenswrapper[4622]: I1126 11:35:15.198750 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:35:15 crc kubenswrapper[4622]: I1126 11:35:15.199141 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:35:15 crc kubenswrapper[4622]: I1126 11:35:15.199183 4622 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:35:15 crc kubenswrapper[4622]: I1126 11:35:15.199856 4622 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c"} pod="openshift-machine-config-operator/machine-config-daemon-k565w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 26 11:35:15 crc kubenswrapper[4622]: I1126 11:35:15.199911 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" containerID="cri-o://ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" gracePeriod=600 Nov 26 11:35:15 crc kubenswrapper[4622]: E1126 11:35:15.317089 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:35:15 crc kubenswrapper[4622]: I1126 11:35:15.333272 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" exitCode=0 Nov 26 11:35:15 crc kubenswrapper[4622]: I1126 11:35:15.333312 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerDied","Data":"ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c"} Nov 26 11:35:15 crc kubenswrapper[4622]: I1126 11:35:15.333345 4622 scope.go:117] "RemoveContainer" containerID="8b880948f29e69f7cc8340036aff51e00d5b43fc0f918a98ac809109e6a99119" Nov 26 11:35:15 crc kubenswrapper[4622]: I1126 11:35:15.333882 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:35:15 crc kubenswrapper[4622]: E1126 11:35:15.334146 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:35:16 crc kubenswrapper[4622]: I1126 11:35:16.342412 4622 generic.go:334] "Generic (PLEG): container finished" podID="5ec8f07f-cde3-45c1-a9b1-2d21751b3026" containerID="071aaf0f86fbc29ef8b509e2f6e7386a0be606fa6768f3925f10a41f3ac8223f" exitCode=0 Nov 26 11:35:16 crc kubenswrapper[4622]: I1126 11:35:16.342476 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" event={"ID":"5ec8f07f-cde3-45c1-a9b1-2d21751b3026","Type":"ContainerDied","Data":"071aaf0f86fbc29ef8b509e2f6e7386a0be606fa6768f3925f10a41f3ac8223f"} Nov 26 11:35:17 crc kubenswrapper[4622]: I1126 11:35:17.663698 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p"
Nov 26 11:35:17 crc kubenswrapper[4622]: I1126 11:35:17.706735 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-inventory\") pod \"5ec8f07f-cde3-45c1-a9b1-2d21751b3026\" (UID: \"5ec8f07f-cde3-45c1-a9b1-2d21751b3026\") "
Nov 26 11:35:17 crc kubenswrapper[4622]: I1126 11:35:17.706841 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrqks\" (UniqueName: \"kubernetes.io/projected/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-kube-api-access-jrqks\") pod \"5ec8f07f-cde3-45c1-a9b1-2d21751b3026\" (UID: \"5ec8f07f-cde3-45c1-a9b1-2d21751b3026\") "
Nov 26 11:35:17 crc kubenswrapper[4622]: I1126 11:35:17.706882 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-ssh-key\") pod \"5ec8f07f-cde3-45c1-a9b1-2d21751b3026\" (UID: \"5ec8f07f-cde3-45c1-a9b1-2d21751b3026\") "
Nov 26 11:35:17 crc kubenswrapper[4622]: I1126 11:35:17.711318 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-kube-api-access-jrqks" (OuterVolumeSpecName: "kube-api-access-jrqks") pod "5ec8f07f-cde3-45c1-a9b1-2d21751b3026" (UID: "5ec8f07f-cde3-45c1-a9b1-2d21751b3026"). InnerVolumeSpecName "kube-api-access-jrqks". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:35:17 crc kubenswrapper[4622]: I1126 11:35:17.727183 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5ec8f07f-cde3-45c1-a9b1-2d21751b3026" (UID: "5ec8f07f-cde3-45c1-a9b1-2d21751b3026"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:35:17 crc kubenswrapper[4622]: I1126 11:35:17.728594 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-inventory" (OuterVolumeSpecName: "inventory") pod "5ec8f07f-cde3-45c1-a9b1-2d21751b3026" (UID: "5ec8f07f-cde3-45c1-a9b1-2d21751b3026"). InnerVolumeSpecName "inventory".
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:35:17 crc kubenswrapper[4622]: I1126 11:35:17.808802 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:35:17 crc kubenswrapper[4622]: I1126 11:35:17.808830 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrqks\" (UniqueName: \"kubernetes.io/projected/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-kube-api-access-jrqks\") on node \"crc\" DevicePath \"\"" Nov 26 11:35:17 crc kubenswrapper[4622]: I1126 11:35:17.808841 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ec8f07f-cde3-45c1-a9b1-2d21751b3026-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.360665 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" event={"ID":"5ec8f07f-cde3-45c1-a9b1-2d21751b3026","Type":"ContainerDied","Data":"004ca29bb80b484b30db4bc930a0693b24cd926d72822b3daa799a4030a818a9"} Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.360696 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="004ca29bb80b484b30db4bc930a0693b24cd926d72822b3daa799a4030a818a9" Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.360711 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p" Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.410436 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5"] Nov 26 11:35:18 crc kubenswrapper[4622]: E1126 11:35:18.410807 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec8f07f-cde3-45c1-a9b1-2d21751b3026" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.410825 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec8f07f-cde3-45c1-a9b1-2d21751b3026" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.411029 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ec8f07f-cde3-45c1-a9b1-2d21751b3026" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.411585 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5"
Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.413363 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8"
Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.413620 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.413751 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.414645 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.418678 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5"]
Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.520271 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70174cd5-313d-4acf-afd4-6da6f13161ad-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5\" (UID: \"70174cd5-313d-4acf-afd4-6da6f13161ad\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5"
Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.520595 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70174cd5-313d-4acf-afd4-6da6f13161ad-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5\" (UID: \"70174cd5-313d-4acf-afd4-6da6f13161ad\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5"
Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.520716 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swgrs\" (UniqueName: \"kubernetes.io/projected/70174cd5-313d-4acf-afd4-6da6f13161ad-kube-api-access-swgrs\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5\" (UID: \"70174cd5-313d-4acf-afd4-6da6f13161ad\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5"
Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.621922 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70174cd5-313d-4acf-afd4-6da6f13161ad-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5\" (UID: \"70174cd5-313d-4acf-afd4-6da6f13161ad\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5"
Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.621996 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70174cd5-313d-4acf-afd4-6da6f13161ad-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5\" (UID: \"70174cd5-313d-4acf-afd4-6da6f13161ad\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5"
Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.622051 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swgrs\" (UniqueName: \"kubernetes.io/projected/70174cd5-313d-4acf-afd4-6da6f13161ad-kube-api-access-swgrs\") pod
\"validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5\" (UID: \"70174cd5-313d-4acf-afd4-6da6f13161ad\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5" Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.626146 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70174cd5-313d-4acf-afd4-6da6f13161ad-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5\" (UID: \"70174cd5-313d-4acf-afd4-6da6f13161ad\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5" Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.627696 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70174cd5-313d-4acf-afd4-6da6f13161ad-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5\" (UID: \"70174cd5-313d-4acf-afd4-6da6f13161ad\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5" Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.635823 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swgrs\" (UniqueName: \"kubernetes.io/projected/70174cd5-313d-4acf-afd4-6da6f13161ad-kube-api-access-swgrs\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5\" (UID: \"70174cd5-313d-4acf-afd4-6da6f13161ad\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5" Nov 26 11:35:18 crc kubenswrapper[4622]: I1126 11:35:18.738600 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5" Nov 26 11:35:19 crc kubenswrapper[4622]: I1126 11:35:19.159461 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5"] Nov 26 11:35:19 crc kubenswrapper[4622]: I1126 11:35:19.368123 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5" event={"ID":"70174cd5-313d-4acf-afd4-6da6f13161ad","Type":"ContainerStarted","Data":"aa7c99acdb02656caada99ee2039d66da25b31a1569b492e42cd283175d2ead8"} Nov 26 11:35:20 crc kubenswrapper[4622]: I1126 11:35:20.376877 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5" event={"ID":"70174cd5-313d-4acf-afd4-6da6f13161ad","Type":"ContainerStarted","Data":"40baf503f7d59c9feba672a8650dfb4a9378655f0feb29ec760edb2ab4e69a51"} Nov 26 11:35:20 crc kubenswrapper[4622]: I1126 11:35:20.393202 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5" podStartSLOduration=1.784173472 podStartE2EDuration="2.393186392s" podCreationTimestamp="2025-11-26 11:35:18 +0000 UTC" firstStartedPulling="2025-11-26 11:35:19.164706178 +0000 UTC m=+1478.755917700" lastFinishedPulling="2025-11-26 11:35:19.773719098 +0000 UTC m=+1479.364930620" observedRunningTime="2025-11-26 11:35:20.390937259 +0000 UTC m=+1479.982148781" watchObservedRunningTime="2025-11-26 11:35:20.393186392 +0000 UTC m=+1479.984397914" Nov 26 11:35:24 crc kubenswrapper[4622]: I1126 11:35:24.409147 4622 generic.go:334] "Generic (PLEG): container finished" podID="70174cd5-313d-4acf-afd4-6da6f13161ad" containerID="40baf503f7d59c9feba672a8650dfb4a9378655f0feb29ec760edb2ab4e69a51" exitCode=0 Nov 26 11:35:24 crc kubenswrapper[4622]: I1126 
11:35:24.409238 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5" event={"ID":"70174cd5-313d-4acf-afd4-6da6f13161ad","Type":"ContainerDied","Data":"40baf503f7d59c9feba672a8650dfb4a9378655f0feb29ec760edb2ab4e69a51"}
Nov 26 11:35:25 crc kubenswrapper[4622]: I1126 11:35:25.707033 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c"
Nov 26 11:35:25 crc kubenswrapper[4622]: E1126 11:35:25.707886 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:35:25 crc kubenswrapper[4622]: I1126 11:35:25.745512 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5"
Nov 26 11:35:25 crc kubenswrapper[4622]: I1126 11:35:25.869309 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70174cd5-313d-4acf-afd4-6da6f13161ad-ssh-key\") pod \"70174cd5-313d-4acf-afd4-6da6f13161ad\" (UID: \"70174cd5-313d-4acf-afd4-6da6f13161ad\") "
Nov 26 11:35:25 crc kubenswrapper[4622]: I1126 11:35:25.869387 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70174cd5-313d-4acf-afd4-6da6f13161ad-inventory\") pod \"70174cd5-313d-4acf-afd4-6da6f13161ad\" (UID: \"70174cd5-313d-4acf-afd4-6da6f13161ad\") "
Nov 26 11:35:25 crc kubenswrapper[4622]: I1126 11:35:25.869572 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swgrs\" (UniqueName: \"kubernetes.io/projected/70174cd5-313d-4acf-afd4-6da6f13161ad-kube-api-access-swgrs\") pod \"70174cd5-313d-4acf-afd4-6da6f13161ad\" (UID: \"70174cd5-313d-4acf-afd4-6da6f13161ad\") "
Nov 26 11:35:25 crc kubenswrapper[4622]: I1126 11:35:25.875545 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70174cd5-313d-4acf-afd4-6da6f13161ad-kube-api-access-swgrs" (OuterVolumeSpecName: "kube-api-access-swgrs") pod "70174cd5-313d-4acf-afd4-6da6f13161ad" (UID: "70174cd5-313d-4acf-afd4-6da6f13161ad"). InnerVolumeSpecName "kube-api-access-swgrs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:35:25 crc kubenswrapper[4622]: I1126 11:35:25.893654 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70174cd5-313d-4acf-afd4-6da6f13161ad-inventory" (OuterVolumeSpecName: "inventory") pod "70174cd5-313d-4acf-afd4-6da6f13161ad" (UID: "70174cd5-313d-4acf-afd4-6da6f13161ad"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:35:25 crc kubenswrapper[4622]: I1126 11:35:25.896444 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70174cd5-313d-4acf-afd4-6da6f13161ad-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "70174cd5-313d-4acf-afd4-6da6f13161ad" (UID: "70174cd5-313d-4acf-afd4-6da6f13161ad"). InnerVolumeSpecName "ssh-key".
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:35:25 crc kubenswrapper[4622]: I1126 11:35:25.972456 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swgrs\" (UniqueName: \"kubernetes.io/projected/70174cd5-313d-4acf-afd4-6da6f13161ad-kube-api-access-swgrs\") on node \"crc\" DevicePath \"\"" Nov 26 11:35:25 crc kubenswrapper[4622]: I1126 11:35:25.972486 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70174cd5-313d-4acf-afd4-6da6f13161ad-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:35:25 crc kubenswrapper[4622]: I1126 11:35:25.972496 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70174cd5-313d-4acf-afd4-6da6f13161ad-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.434435 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5" event={"ID":"70174cd5-313d-4acf-afd4-6da6f13161ad","Type":"ContainerDied","Data":"aa7c99acdb02656caada99ee2039d66da25b31a1569b492e42cd283175d2ead8"} Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.434482 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5" Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.434533 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa7c99acdb02656caada99ee2039d66da25b31a1569b492e42cd283175d2ead8" Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.488885 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz"] Nov 26 11:35:26 crc kubenswrapper[4622]: E1126 11:35:26.489248 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70174cd5-313d-4acf-afd4-6da6f13161ad" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.489271 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="70174cd5-313d-4acf-afd4-6da6f13161ad" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.489464 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="70174cd5-313d-4acf-afd4-6da6f13161ad" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.490037 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz"
Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.493118 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8"
Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.493183 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.493274 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.493458 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.499618 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz"]
Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.586018 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svcck\" (UniqueName: \"kubernetes.io/projected/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-kube-api-access-svcck\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dz7hz\" (UID: \"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz"
Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.586475 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dz7hz\" (UID: \"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz"
Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.586672 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dz7hz\" (UID: \"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz"
Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.688827 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svcck\" (UniqueName: \"kubernetes.io/projected/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-kube-api-access-svcck\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dz7hz\" (UID: \"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz"
Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.688905 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dz7hz\" (UID: \"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz"
Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.689021 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dz7hz\" (UID:
\"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz" Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.692548 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dz7hz\" (UID: \"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz" Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.692562 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dz7hz\" (UID: \"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz" Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.704238 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svcck\" (UniqueName: \"kubernetes.io/projected/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-kube-api-access-svcck\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dz7hz\" (UID: \"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz" Nov 26 11:35:26 crc kubenswrapper[4622]: I1126 11:35:26.807107 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz" Nov 26 11:35:27 crc kubenswrapper[4622]: I1126 11:35:27.276112 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz"] Nov 26 11:35:27 crc kubenswrapper[4622]: I1126 11:35:27.446985 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz" event={"ID":"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4","Type":"ContainerStarted","Data":"a601a1a25982b9c99e5710fbcad0425fc1b089a1bef957dbfd50c67d734ccea1"} Nov 26 11:35:28 crc kubenswrapper[4622]: I1126 11:35:28.456457 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz" event={"ID":"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4","Type":"ContainerStarted","Data":"65a525ae10851f73fcb96b845f1d2c380cbbd3e4698790249966932536c5b9b0"} Nov 26 11:35:28 crc kubenswrapper[4622]: I1126 11:35:28.472491 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz" podStartSLOduration=1.88571094 podStartE2EDuration="2.472475535s" podCreationTimestamp="2025-11-26 11:35:26 +0000 UTC" firstStartedPulling="2025-11-26 11:35:27.279664164 +0000 UTC m=+1486.870875686" lastFinishedPulling="2025-11-26 11:35:27.86642876 +0000 UTC m=+1487.457640281" observedRunningTime="2025-11-26 11:35:28.469516641 +0000 UTC m=+1488.060728163" watchObservedRunningTime="2025-11-26 11:35:28.472475535 +0000 UTC m=+1488.063687057" Nov 26 11:35:36 crc kubenswrapper[4622]: I1126 11:35:36.519602 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-q8nm7"] Nov 26 11:35:36 crc kubenswrapper[4622]: I1126 11:35:36.522013 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-q8nm7"
Nov 26 11:35:36 crc kubenswrapper[4622]: I1126 11:35:36.547590 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q8nm7"]
Nov 26 11:35:36 crc kubenswrapper[4622]: I1126 11:35:36.622253 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/778729ac-7836-4cec-897b-3d1144defef0-utilities\") pod \"certified-operators-q8nm7\" (UID: \"778729ac-7836-4cec-897b-3d1144defef0\") " pod="openshift-marketplace/certified-operators-q8nm7"
Nov 26 11:35:36 crc kubenswrapper[4622]: I1126 11:35:36.622355 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/778729ac-7836-4cec-897b-3d1144defef0-catalog-content\") pod \"certified-operators-q8nm7\" (UID: \"778729ac-7836-4cec-897b-3d1144defef0\") " pod="openshift-marketplace/certified-operators-q8nm7"
Nov 26 11:35:36 crc kubenswrapper[4622]: I1126 11:35:36.622727 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dnlh\" (UniqueName: \"kubernetes.io/projected/778729ac-7836-4cec-897b-3d1144defef0-kube-api-access-6dnlh\") pod \"certified-operators-q8nm7\" (UID: \"778729ac-7836-4cec-897b-3d1144defef0\") " pod="openshift-marketplace/certified-operators-q8nm7"
Nov 26 11:35:36 crc kubenswrapper[4622]: I1126 11:35:36.725131 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dnlh\" (UniqueName: \"kubernetes.io/projected/778729ac-7836-4cec-897b-3d1144defef0-kube-api-access-6dnlh\") pod \"certified-operators-q8nm7\" (UID: \"778729ac-7836-4cec-897b-3d1144defef0\") " pod="openshift-marketplace/certified-operators-q8nm7"
Nov 26 11:35:36 crc kubenswrapper[4622]: I1126 11:35:36.725246 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/778729ac-7836-4cec-897b-3d1144defef0-utilities\") pod \"certified-operators-q8nm7\" (UID: \"778729ac-7836-4cec-897b-3d1144defef0\") " pod="openshift-marketplace/certified-operators-q8nm7"
Nov 26 11:35:36 crc kubenswrapper[4622]: I1126 11:35:36.725279 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/778729ac-7836-4cec-897b-3d1144defef0-catalog-content\") pod \"certified-operators-q8nm7\" (UID: \"778729ac-7836-4cec-897b-3d1144defef0\") " pod="openshift-marketplace/certified-operators-q8nm7"
Nov 26 11:35:36 crc kubenswrapper[4622]: I1126 11:35:36.725892 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/778729ac-7836-4cec-897b-3d1144defef0-catalog-content\") pod \"certified-operators-q8nm7\" (UID: \"778729ac-7836-4cec-897b-3d1144defef0\") " pod="openshift-marketplace/certified-operators-q8nm7"
Nov 26 11:35:36 crc kubenswrapper[4622]: I1126 11:35:36.725914 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/778729ac-7836-4cec-897b-3d1144defef0-utilities\") pod \"certified-operators-q8nm7\" (UID: \"778729ac-7836-4cec-897b-3d1144defef0\") " pod="openshift-marketplace/certified-operators-q8nm7"
Nov 26 11:35:36 crc kubenswrapper[4622]: I1126 11:35:36.743591 4622 operation_generator.go:637]
"MountVolume.SetUp succeeded for volume \"kube-api-access-6dnlh\" (UniqueName: \"kubernetes.io/projected/778729ac-7836-4cec-897b-3d1144defef0-kube-api-access-6dnlh\") pod \"certified-operators-q8nm7\" (UID: \"778729ac-7836-4cec-897b-3d1144defef0\") " pod="openshift-marketplace/certified-operators-q8nm7" Nov 26 11:35:36 crc kubenswrapper[4622]: I1126 11:35:36.851420 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q8nm7" Nov 26 11:35:37 crc kubenswrapper[4622]: I1126 11:35:37.315570 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q8nm7"] Nov 26 11:35:37 crc kubenswrapper[4622]: W1126 11:35:37.318259 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod778729ac_7836_4cec_897b_3d1144defef0.slice/crio-cae34fc24ed6785d55c96f7d7990c17015d0a44d7dcfa046fd2889e751fbe72a WatchSource:0}: Error finding container cae34fc24ed6785d55c96f7d7990c17015d0a44d7dcfa046fd2889e751fbe72a: Status 404 returned error can't find the container with id cae34fc24ed6785d55c96f7d7990c17015d0a44d7dcfa046fd2889e751fbe72a Nov 26 11:35:37 crc kubenswrapper[4622]: I1126 11:35:37.581880 4622 generic.go:334] "Generic (PLEG): container finished" podID="778729ac-7836-4cec-897b-3d1144defef0" containerID="8bf96c500ae8db249a8d785d69809aa4dc4b2dce0c31c1454409e48b5d7402f7" exitCode=0 Nov 26 11:35:37 crc kubenswrapper[4622]: I1126 11:35:37.581950 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8nm7" event={"ID":"778729ac-7836-4cec-897b-3d1144defef0","Type":"ContainerDied","Data":"8bf96c500ae8db249a8d785d69809aa4dc4b2dce0c31c1454409e48b5d7402f7"} Nov 26 11:35:37 crc kubenswrapper[4622]: I1126 11:35:37.582008 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8nm7" event={"ID":"778729ac-7836-4cec-897b-3d1144defef0","Type":"ContainerStarted","Data":"cae34fc24ed6785d55c96f7d7990c17015d0a44d7dcfa046fd2889e751fbe72a"} Nov 26 11:35:37 crc kubenswrapper[4622]: I1126 11:35:37.585333 4622 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 26 11:35:38 crc kubenswrapper[4622]: I1126 11:35:38.592210 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8nm7" event={"ID":"778729ac-7836-4cec-897b-3d1144defef0","Type":"ContainerStarted","Data":"1b2637d0bc6e5d03309e54903cf339d85192b8a77f7138fb7cd3ae4f02e0176b"} Nov 26 11:35:38 crc kubenswrapper[4622]: I1126 11:35:38.706610 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:35:38 crc kubenswrapper[4622]: E1126 11:35:38.707083 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:35:39 crc kubenswrapper[4622]: I1126 11:35:39.604804 4622 generic.go:334] "Generic (PLEG): container finished" podID="778729ac-7836-4cec-897b-3d1144defef0" containerID="1b2637d0bc6e5d03309e54903cf339d85192b8a77f7138fb7cd3ae4f02e0176b" exitCode=0 Nov 26 11:35:39 crc 
kubenswrapper[4622]: I1126 11:35:39.604955 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8nm7" event={"ID":"778729ac-7836-4cec-897b-3d1144defef0","Type":"ContainerDied","Data":"1b2637d0bc6e5d03309e54903cf339d85192b8a77f7138fb7cd3ae4f02e0176b"}
Nov 26 11:35:40 crc kubenswrapper[4622]: I1126 11:35:40.616580 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8nm7" event={"ID":"778729ac-7836-4cec-897b-3d1144defef0","Type":"ContainerStarted","Data":"1652036c74ae4634790b42958f4cdda126e96716bf85590b3ae0ca6ac5b235c2"}
Nov 26 11:35:40 crc kubenswrapper[4622]: I1126 11:35:40.634148 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-q8nm7" podStartSLOduration=2.086933374 podStartE2EDuration="4.634125618s" podCreationTimestamp="2025-11-26 11:35:36 +0000 UTC" firstStartedPulling="2025-11-26 11:35:37.585027014 +0000 UTC m=+1497.176238536" lastFinishedPulling="2025-11-26 11:35:40.132219259 +0000 UTC m=+1499.723430780" observedRunningTime="2025-11-26 11:35:40.630426178 +0000 UTC m=+1500.221637701" watchObservedRunningTime="2025-11-26 11:35:40.634125618 +0000 UTC m=+1500.225337139"
Nov 26 11:35:46 crc kubenswrapper[4622]: I1126 11:35:46.851976 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-q8nm7"
Nov 26 11:35:46 crc kubenswrapper[4622]: I1126 11:35:46.852765 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-q8nm7"
Nov 26 11:35:46 crc kubenswrapper[4622]: I1126 11:35:46.894027 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-q8nm7"
Nov 26 11:35:47 crc kubenswrapper[4622]: I1126 11:35:47.715742 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-q8nm7"
Nov 26 11:35:47 crc kubenswrapper[4622]: I1126 11:35:47.767250 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q8nm7"]
Nov 26 11:35:49 crc kubenswrapper[4622]: I1126 11:35:49.707281 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-q8nm7" podUID="778729ac-7836-4cec-897b-3d1144defef0" containerName="registry-server" containerID="cri-o://1652036c74ae4634790b42958f4cdda126e96716bf85590b3ae0ca6ac5b235c2" gracePeriod=2
Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.123794 4622 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/certified-operators-q8nm7"
Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.147208 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/778729ac-7836-4cec-897b-3d1144defef0-catalog-content\") pod \"778729ac-7836-4cec-897b-3d1144defef0\" (UID: \"778729ac-7836-4cec-897b-3d1144defef0\") "
Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.147354 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dnlh\" (UniqueName: \"kubernetes.io/projected/778729ac-7836-4cec-897b-3d1144defef0-kube-api-access-6dnlh\") pod \"778729ac-7836-4cec-897b-3d1144defef0\" (UID: \"778729ac-7836-4cec-897b-3d1144defef0\") "
Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.147583 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/778729ac-7836-4cec-897b-3d1144defef0-utilities\") pod \"778729ac-7836-4cec-897b-3d1144defef0\" (UID: \"778729ac-7836-4cec-897b-3d1144defef0\") "
Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.153142 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/778729ac-7836-4cec-897b-3d1144defef0-utilities" (OuterVolumeSpecName: "utilities") pod "778729ac-7836-4cec-897b-3d1144defef0" (UID: "778729ac-7836-4cec-897b-3d1144defef0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.157030 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/778729ac-7836-4cec-897b-3d1144defef0-kube-api-access-6dnlh" (OuterVolumeSpecName: "kube-api-access-6dnlh") pod "778729ac-7836-4cec-897b-3d1144defef0" (UID: "778729ac-7836-4cec-897b-3d1144defef0"). InnerVolumeSpecName "kube-api-access-6dnlh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.191732 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/778729ac-7836-4cec-897b-3d1144defef0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "778729ac-7836-4cec-897b-3d1144defef0" (UID: "778729ac-7836-4cec-897b-3d1144defef0"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.252302 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/778729ac-7836-4cec-897b-3d1144defef0-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.252599 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dnlh\" (UniqueName: \"kubernetes.io/projected/778729ac-7836-4cec-897b-3d1144defef0-kube-api-access-6dnlh\") on node \"crc\" DevicePath \"\"" Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.252709 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/778729ac-7836-4cec-897b-3d1144defef0-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.720674 4622 generic.go:334] "Generic (PLEG): container finished" podID="778729ac-7836-4cec-897b-3d1144defef0" containerID="1652036c74ae4634790b42958f4cdda126e96716bf85590b3ae0ca6ac5b235c2" exitCode=0 Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.720724 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q8nm7" Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.720741 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8nm7" event={"ID":"778729ac-7836-4cec-897b-3d1144defef0","Type":"ContainerDied","Data":"1652036c74ae4634790b42958f4cdda126e96716bf85590b3ae0ca6ac5b235c2"} Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.720837 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8nm7" event={"ID":"778729ac-7836-4cec-897b-3d1144defef0","Type":"ContainerDied","Data":"cae34fc24ed6785d55c96f7d7990c17015d0a44d7dcfa046fd2889e751fbe72a"} Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.720873 4622 scope.go:117] "RemoveContainer" containerID="1652036c74ae4634790b42958f4cdda126e96716bf85590b3ae0ca6ac5b235c2" Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.750762 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q8nm7"] Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.751366 4622 scope.go:117] "RemoveContainer" containerID="1b2637d0bc6e5d03309e54903cf339d85192b8a77f7138fb7cd3ae4f02e0176b" Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.762364 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-q8nm7"] Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.777730 4622 scope.go:117] "RemoveContainer" containerID="8bf96c500ae8db249a8d785d69809aa4dc4b2dce0c31c1454409e48b5d7402f7" Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.799205 4622 scope.go:117] "RemoveContainer" containerID="1652036c74ae4634790b42958f4cdda126e96716bf85590b3ae0ca6ac5b235c2" Nov 26 11:35:50 crc kubenswrapper[4622]: E1126 11:35:50.799603 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1652036c74ae4634790b42958f4cdda126e96716bf85590b3ae0ca6ac5b235c2\": container with ID starting with 1652036c74ae4634790b42958f4cdda126e96716bf85590b3ae0ca6ac5b235c2 not found: ID does not exist" containerID="1652036c74ae4634790b42958f4cdda126e96716bf85590b3ae0ca6ac5b235c2" Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.799657 
4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1652036c74ae4634790b42958f4cdda126e96716bf85590b3ae0ca6ac5b235c2"} err="failed to get container status \"1652036c74ae4634790b42958f4cdda126e96716bf85590b3ae0ca6ac5b235c2\": rpc error: code = NotFound desc = could not find container \"1652036c74ae4634790b42958f4cdda126e96716bf85590b3ae0ca6ac5b235c2\": container with ID starting with 1652036c74ae4634790b42958f4cdda126e96716bf85590b3ae0ca6ac5b235c2 not found: ID does not exist"
Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.799694 4622 scope.go:117] "RemoveContainer" containerID="1b2637d0bc6e5d03309e54903cf339d85192b8a77f7138fb7cd3ae4f02e0176b"
Nov 26 11:35:50 crc kubenswrapper[4622]: E1126 11:35:50.800047 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b2637d0bc6e5d03309e54903cf339d85192b8a77f7138fb7cd3ae4f02e0176b\": container with ID starting with 1b2637d0bc6e5d03309e54903cf339d85192b8a77f7138fb7cd3ae4f02e0176b not found: ID does not exist" containerID="1b2637d0bc6e5d03309e54903cf339d85192b8a77f7138fb7cd3ae4f02e0176b"
Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.800079 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b2637d0bc6e5d03309e54903cf339d85192b8a77f7138fb7cd3ae4f02e0176b"} err="failed to get container status \"1b2637d0bc6e5d03309e54903cf339d85192b8a77f7138fb7cd3ae4f02e0176b\": rpc error: code = NotFound desc = could not find container \"1b2637d0bc6e5d03309e54903cf339d85192b8a77f7138fb7cd3ae4f02e0176b\": container with ID starting with 1b2637d0bc6e5d03309e54903cf339d85192b8a77f7138fb7cd3ae4f02e0176b not found: ID does not exist"
Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.800103 4622 scope.go:117] "RemoveContainer" containerID="8bf96c500ae8db249a8d785d69809aa4dc4b2dce0c31c1454409e48b5d7402f7"
Nov 26 11:35:50 crc kubenswrapper[4622]: E1126 11:35:50.800362 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bf96c500ae8db249a8d785d69809aa4dc4b2dce0c31c1454409e48b5d7402f7\": container with ID starting with 8bf96c500ae8db249a8d785d69809aa4dc4b2dce0c31c1454409e48b5d7402f7 not found: ID does not exist" containerID="8bf96c500ae8db249a8d785d69809aa4dc4b2dce0c31c1454409e48b5d7402f7"
Nov 26 11:35:50 crc kubenswrapper[4622]: I1126 11:35:50.800419 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bf96c500ae8db249a8d785d69809aa4dc4b2dce0c31c1454409e48b5d7402f7"} err="failed to get container status \"8bf96c500ae8db249a8d785d69809aa4dc4b2dce0c31c1454409e48b5d7402f7\": rpc error: code = NotFound desc = could not find container \"8bf96c500ae8db249a8d785d69809aa4dc4b2dce0c31c1454409e48b5d7402f7\": container with ID starting with 8bf96c500ae8db249a8d785d69809aa4dc4b2dce0c31c1454409e48b5d7402f7 not found: ID does not exist"
Nov 26 11:35:52 crc kubenswrapper[4622]: I1126 11:35:52.705927 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c"
Nov 26 11:35:52 crc kubenswrapper[4622]: E1126 11:35:52.706434 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon
pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:35:52 crc kubenswrapper[4622]: I1126 11:35:52.714780 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="778729ac-7836-4cec-897b-3d1144defef0" path="/var/lib/kubelet/pods/778729ac-7836-4cec-897b-3d1144defef0/volumes"
Nov 26 11:35:55 crc kubenswrapper[4622]: I1126 11:35:55.776450 4622 generic.go:334] "Generic (PLEG): container finished" podID="2e7303f2-ffb9-44ef-ac57-0b84f87fdca4" containerID="65a525ae10851f73fcb96b845f1d2c380cbbd3e4698790249966932536c5b9b0" exitCode=0
Nov 26 11:35:55 crc kubenswrapper[4622]: I1126 11:35:55.776553 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz" event={"ID":"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4","Type":"ContainerDied","Data":"65a525ae10851f73fcb96b845f1d2c380cbbd3e4698790249966932536c5b9b0"}
Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.148941 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz"
Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.200763 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svcck\" (UniqueName: \"kubernetes.io/projected/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-kube-api-access-svcck\") pod \"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4\" (UID: \"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4\") "
Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.200875 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-inventory\") pod \"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4\" (UID: \"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4\") "
Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.200939 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-ssh-key\") pod \"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4\" (UID: \"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4\") "
Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.208014 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-kube-api-access-svcck" (OuterVolumeSpecName: "kube-api-access-svcck") pod "2e7303f2-ffb9-44ef-ac57-0b84f87fdca4" (UID: "2e7303f2-ffb9-44ef-ac57-0b84f87fdca4"). InnerVolumeSpecName "kube-api-access-svcck". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.226712 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2e7303f2-ffb9-44ef-ac57-0b84f87fdca4" (UID: "2e7303f2-ffb9-44ef-ac57-0b84f87fdca4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.226833 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-inventory" (OuterVolumeSpecName: "inventory") pod "2e7303f2-ffb9-44ef-ac57-0b84f87fdca4" (UID: "2e7303f2-ffb9-44ef-ac57-0b84f87fdca4").
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.304032 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.304068 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.304079 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svcck\" (UniqueName: \"kubernetes.io/projected/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4-kube-api-access-svcck\") on node \"crc\" DevicePath \"\"" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.798909 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz" event={"ID":"2e7303f2-ffb9-44ef-ac57-0b84f87fdca4","Type":"ContainerDied","Data":"a601a1a25982b9c99e5710fbcad0425fc1b089a1bef957dbfd50c67d734ccea1"} Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.798970 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a601a1a25982b9c99e5710fbcad0425fc1b089a1bef957dbfd50c67d734ccea1" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.799010 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.882190 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62"] Nov 26 11:35:57 crc kubenswrapper[4622]: E1126 11:35:57.882953 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="778729ac-7836-4cec-897b-3d1144defef0" containerName="extract-content" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.882978 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="778729ac-7836-4cec-897b-3d1144defef0" containerName="extract-content" Nov 26 11:35:57 crc kubenswrapper[4622]: E1126 11:35:57.883021 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e7303f2-ffb9-44ef-ac57-0b84f87fdca4" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.883030 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e7303f2-ffb9-44ef-ac57-0b84f87fdca4" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:35:57 crc kubenswrapper[4622]: E1126 11:35:57.883080 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="778729ac-7836-4cec-897b-3d1144defef0" containerName="extract-utilities" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.883086 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="778729ac-7836-4cec-897b-3d1144defef0" containerName="extract-utilities" Nov 26 11:35:57 crc kubenswrapper[4622]: E1126 11:35:57.883105 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="778729ac-7836-4cec-897b-3d1144defef0" containerName="registry-server" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.883117 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="778729ac-7836-4cec-897b-3d1144defef0" containerName="registry-server" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.883589 4622 
memory_manager.go:354] "RemoveStaleState removing state" podUID="2e7303f2-ffb9-44ef-ac57-0b84f87fdca4" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.883617 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="778729ac-7836-4cec-897b-3d1144defef0" containerName="registry-server" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.888772 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.891207 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.891562 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.891730 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.892692 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:35:57 crc kubenswrapper[4622]: I1126 11:35:57.896332 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62"] Nov 26 11:35:58 crc kubenswrapper[4622]: I1126 11:35:58.020464 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/999f62ec-225d-4c9b-a142-71faced65384-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62\" (UID: \"999f62ec-225d-4c9b-a142-71faced65384\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" Nov 26 11:35:58 crc kubenswrapper[4622]: I1126 11:35:58.020972 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpqqc\" (UniqueName: \"kubernetes.io/projected/999f62ec-225d-4c9b-a142-71faced65384-kube-api-access-tpqqc\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62\" (UID: \"999f62ec-225d-4c9b-a142-71faced65384\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" Nov 26 11:35:58 crc kubenswrapper[4622]: I1126 11:35:58.021418 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/999f62ec-225d-4c9b-a142-71faced65384-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62\" (UID: \"999f62ec-225d-4c9b-a142-71faced65384\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" Nov 26 11:35:58 crc kubenswrapper[4622]: I1126 11:35:58.123023 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/999f62ec-225d-4c9b-a142-71faced65384-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62\" (UID: \"999f62ec-225d-4c9b-a142-71faced65384\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" Nov 26 11:35:58 crc kubenswrapper[4622]: I1126 11:35:58.123085 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/999f62ec-225d-4c9b-a142-71faced65384-inventory\") pod 
\"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62\" (UID: \"999f62ec-225d-4c9b-a142-71faced65384\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" Nov 26 11:35:58 crc kubenswrapper[4622]: I1126 11:35:58.123145 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpqqc\" (UniqueName: \"kubernetes.io/projected/999f62ec-225d-4c9b-a142-71faced65384-kube-api-access-tpqqc\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62\" (UID: \"999f62ec-225d-4c9b-a142-71faced65384\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" Nov 26 11:35:58 crc kubenswrapper[4622]: I1126 11:35:58.134128 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/999f62ec-225d-4c9b-a142-71faced65384-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62\" (UID: \"999f62ec-225d-4c9b-a142-71faced65384\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" Nov 26 11:35:58 crc kubenswrapper[4622]: I1126 11:35:58.135095 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/999f62ec-225d-4c9b-a142-71faced65384-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62\" (UID: \"999f62ec-225d-4c9b-a142-71faced65384\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" Nov 26 11:35:58 crc kubenswrapper[4622]: I1126 11:35:58.140786 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpqqc\" (UniqueName: \"kubernetes.io/projected/999f62ec-225d-4c9b-a142-71faced65384-kube-api-access-tpqqc\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62\" (UID: \"999f62ec-225d-4c9b-a142-71faced65384\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" Nov 26 11:35:58 crc kubenswrapper[4622]: I1126 11:35:58.211413 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" Nov 26 11:35:58 crc kubenswrapper[4622]: I1126 11:35:58.691059 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62"] Nov 26 11:35:58 crc kubenswrapper[4622]: I1126 11:35:58.809285 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" event={"ID":"999f62ec-225d-4c9b-a142-71faced65384","Type":"ContainerStarted","Data":"7f3a56dc4f3eca76a7f3f63f5cb162839263e0653016325aca7478996b609d23"} Nov 26 11:36:00 crc kubenswrapper[4622]: I1126 11:36:00.827289 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" event={"ID":"999f62ec-225d-4c9b-a142-71faced65384","Type":"ContainerStarted","Data":"4d7a8510101bc50dbbe80a4bf50c03e122e04cbd6c5cf18e5eb73f0fb1bbd94b"} Nov 26 11:36:00 crc kubenswrapper[4622]: I1126 11:36:00.850139 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" podStartSLOduration=2.628268572 podStartE2EDuration="3.850123126s" podCreationTimestamp="2025-11-26 11:35:57 +0000 UTC" firstStartedPulling="2025-11-26 11:35:58.701260157 +0000 UTC m=+1518.292471679" lastFinishedPulling="2025-11-26 11:35:59.923114712 +0000 UTC m=+1519.514326233" observedRunningTime="2025-11-26 11:36:00.841715264 +0000 UTC m=+1520.432926785" watchObservedRunningTime="2025-11-26 11:36:00.850123126 +0000 UTC m=+1520.441334647" Nov 26 11:36:03 crc kubenswrapper[4622]: I1126 11:36:03.738967 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-48w97"] Nov 26 11:36:03 crc kubenswrapper[4622]: I1126 11:36:03.742214 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:03 crc kubenswrapper[4622]: I1126 11:36:03.755340 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-48w97"] Nov 26 11:36:03 crc kubenswrapper[4622]: I1126 11:36:03.860611 4622 generic.go:334] "Generic (PLEG): container finished" podID="999f62ec-225d-4c9b-a142-71faced65384" containerID="4d7a8510101bc50dbbe80a4bf50c03e122e04cbd6c5cf18e5eb73f0fb1bbd94b" exitCode=0 Nov 26 11:36:03 crc kubenswrapper[4622]: I1126 11:36:03.860657 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" event={"ID":"999f62ec-225d-4c9b-a142-71faced65384","Type":"ContainerDied","Data":"4d7a8510101bc50dbbe80a4bf50c03e122e04cbd6c5cf18e5eb73f0fb1bbd94b"} Nov 26 11:36:03 crc kubenswrapper[4622]: I1126 11:36:03.941821 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9blg\" (UniqueName: \"kubernetes.io/projected/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-kube-api-access-l9blg\") pod \"redhat-marketplace-48w97\" (UID: \"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37\") " pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:03 crc kubenswrapper[4622]: I1126 11:36:03.942021 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-utilities\") pod \"redhat-marketplace-48w97\" (UID: \"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37\") " pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:03 crc kubenswrapper[4622]: I1126 11:36:03.942043 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-catalog-content\") pod \"redhat-marketplace-48w97\" (UID: \"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37\") " pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.037627 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9cd0-account-create-update-2jfbk"] Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.044604 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9blg\" (UniqueName: \"kubernetes.io/projected/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-kube-api-access-l9blg\") pod \"redhat-marketplace-48w97\" (UID: \"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37\") " pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.044777 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-utilities\") pod \"redhat-marketplace-48w97\" (UID: \"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37\") " pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.044808 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-catalog-content\") pod \"redhat-marketplace-48w97\" (UID: \"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37\") " pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.045084 4622 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/placement-db-create-qz4zz"] Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.045576 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-catalog-content\") pod \"redhat-marketplace-48w97\" (UID: \"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37\") " pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.045767 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-utilities\") pod \"redhat-marketplace-48w97\" (UID: \"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37\") " pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.053241 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-9cd0-account-create-update-2jfbk"] Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.063459 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-6540-account-create-update-dgl4g"] Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.067319 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9blg\" (UniqueName: \"kubernetes.io/projected/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-kube-api-access-l9blg\") pod \"redhat-marketplace-48w97\" (UID: \"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37\") " pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.069800 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-qz4zz"] Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.076233 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-6540-account-create-update-dgl4g"] Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.081468 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-6hk2z"] Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.086924 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-6hk2z"] Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.361665 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.706339 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:36:04 crc kubenswrapper[4622]: E1126 11:36:04.706886 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.733165 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="240474a3-cc8a-4bfd-991d-1e67a6286df3" path="/var/lib/kubelet/pods/240474a3-cc8a-4bfd-991d-1e67a6286df3/volumes" Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.734090 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63a79d2a-f82a-49df-b959-e3c0b61cd34a" path="/var/lib/kubelet/pods/63a79d2a-f82a-49df-b959-e3c0b61cd34a/volumes" Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.735202 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67d9311d-d647-40f4-b291-c3540b41f78c" path="/var/lib/kubelet/pods/67d9311d-d647-40f4-b291-c3540b41f78c/volumes" Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.736453 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b095b953-f02f-4a15-b3e0-e698a15b848f" path="/var/lib/kubelet/pods/b095b953-f02f-4a15-b3e0-e698a15b848f/volumes" Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.759633 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-48w97"] Nov 26 11:36:04 crc kubenswrapper[4622]: W1126 11:36:04.760494 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8d07ac2_2ea0_47d3_9f8d_21502dbdfb37.slice/crio-ecfc7493044a823a2e5f400b262f08ebab4a230d518d895f1527f6a346dc09d7 WatchSource:0}: Error finding container ecfc7493044a823a2e5f400b262f08ebab4a230d518d895f1527f6a346dc09d7: Status 404 returned error can't find the container with id ecfc7493044a823a2e5f400b262f08ebab4a230d518d895f1527f6a346dc09d7 Nov 26 11:36:04 crc kubenswrapper[4622]: I1126 11:36:04.875493 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48w97" event={"ID":"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37","Type":"ContainerStarted","Data":"ecfc7493044a823a2e5f400b262f08ebab4a230d518d895f1527f6a346dc09d7"} Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.705250 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.877004 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/999f62ec-225d-4c9b-a142-71faced65384-ssh-key\") pod \"999f62ec-225d-4c9b-a142-71faced65384\" (UID: \"999f62ec-225d-4c9b-a142-71faced65384\") " Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.877191 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/999f62ec-225d-4c9b-a142-71faced65384-inventory\") pod \"999f62ec-225d-4c9b-a142-71faced65384\" (UID: \"999f62ec-225d-4c9b-a142-71faced65384\") " Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.877407 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpqqc\" (UniqueName: \"kubernetes.io/projected/999f62ec-225d-4c9b-a142-71faced65384-kube-api-access-tpqqc\") pod \"999f62ec-225d-4c9b-a142-71faced65384\" (UID: \"999f62ec-225d-4c9b-a142-71faced65384\") " Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.884127 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/999f62ec-225d-4c9b-a142-71faced65384-kube-api-access-tpqqc" (OuterVolumeSpecName: "kube-api-access-tpqqc") pod "999f62ec-225d-4c9b-a142-71faced65384" (UID: "999f62ec-225d-4c9b-a142-71faced65384"). InnerVolumeSpecName "kube-api-access-tpqqc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.887845 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.888135 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62" event={"ID":"999f62ec-225d-4c9b-a142-71faced65384","Type":"ContainerDied","Data":"7f3a56dc4f3eca76a7f3f63f5cb162839263e0653016325aca7478996b609d23"} Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.888186 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f3a56dc4f3eca76a7f3f63f5cb162839263e0653016325aca7478996b609d23" Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.895049 4622 generic.go:334] "Generic (PLEG): container finished" podID="a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37" containerID="a02c8517d2b7b14dce8fa896ce36d079569589f8018b1fb8ec08b45c8e122a45" exitCode=0 Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.895132 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48w97" event={"ID":"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37","Type":"ContainerDied","Data":"a02c8517d2b7b14dce8fa896ce36d079569589f8018b1fb8ec08b45c8e122a45"} Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.916905 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/999f62ec-225d-4c9b-a142-71faced65384-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "999f62ec-225d-4c9b-a142-71faced65384" (UID: "999f62ec-225d-4c9b-a142-71faced65384"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.928156 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/999f62ec-225d-4c9b-a142-71faced65384-inventory" (OuterVolumeSpecName: "inventory") pod "999f62ec-225d-4c9b-a142-71faced65384" (UID: "999f62ec-225d-4c9b-a142-71faced65384"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.958316 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5"] Nov 26 11:36:05 crc kubenswrapper[4622]: E1126 11:36:05.958753 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="999f62ec-225d-4c9b-a142-71faced65384" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.958773 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="999f62ec-225d-4c9b-a142-71faced65384" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.962158 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="999f62ec-225d-4c9b-a142-71faced65384" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.963108 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.968783 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5"] Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.980649 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/999f62ec-225d-4c9b-a142-71faced65384-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.980765 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpqqc\" (UniqueName: \"kubernetes.io/projected/999f62ec-225d-4c9b-a142-71faced65384-kube-api-access-tpqqc\") on node \"crc\" DevicePath \"\"" Nov 26 11:36:05 crc kubenswrapper[4622]: I1126 11:36:05.980839 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/999f62ec-225d-4c9b-a142-71faced65384-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:36:06 crc kubenswrapper[4622]: I1126 11:36:06.083253 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkzgp\" (UniqueName: \"kubernetes.io/projected/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-kube-api-access-tkzgp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56fl5\" (UID: \"f9b52e72-9a14-4f7c-b704-2dd85b2f4568\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" Nov 26 11:36:06 crc kubenswrapper[4622]: I1126 11:36:06.083775 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56fl5\" (UID: \"f9b52e72-9a14-4f7c-b704-2dd85b2f4568\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" Nov 26 11:36:06 crc kubenswrapper[4622]: I1126 11:36:06.083917 4622 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56fl5\" (UID: \"f9b52e72-9a14-4f7c-b704-2dd85b2f4568\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" Nov 26 11:36:06 crc kubenswrapper[4622]: I1126 11:36:06.185917 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56fl5\" (UID: \"f9b52e72-9a14-4f7c-b704-2dd85b2f4568\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" Nov 26 11:36:06 crc kubenswrapper[4622]: I1126 11:36:06.186146 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkzgp\" (UniqueName: \"kubernetes.io/projected/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-kube-api-access-tkzgp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56fl5\" (UID: \"f9b52e72-9a14-4f7c-b704-2dd85b2f4568\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" Nov 26 11:36:06 crc kubenswrapper[4622]: I1126 11:36:06.186270 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56fl5\" (UID: \"f9b52e72-9a14-4f7c-b704-2dd85b2f4568\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" Nov 26 11:36:06 crc kubenswrapper[4622]: I1126 11:36:06.190765 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56fl5\" (UID: \"f9b52e72-9a14-4f7c-b704-2dd85b2f4568\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" Nov 26 11:36:06 crc kubenswrapper[4622]: I1126 11:36:06.190807 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56fl5\" (UID: \"f9b52e72-9a14-4f7c-b704-2dd85b2f4568\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" Nov 26 11:36:06 crc kubenswrapper[4622]: I1126 11:36:06.200978 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkzgp\" (UniqueName: \"kubernetes.io/projected/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-kube-api-access-tkzgp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56fl5\" (UID: \"f9b52e72-9a14-4f7c-b704-2dd85b2f4568\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" Nov 26 11:36:06 crc kubenswrapper[4622]: I1126 11:36:06.295153 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" Nov 26 11:36:06 crc kubenswrapper[4622]: I1126 11:36:06.769408 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5"] Nov 26 11:36:06 crc kubenswrapper[4622]: W1126 11:36:06.769491 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9b52e72_9a14_4f7c_b704_2dd85b2f4568.slice/crio-063c2b47dd891eb339042949cd035f9ed4b491798403650af453a513714968cb WatchSource:0}: Error finding container 063c2b47dd891eb339042949cd035f9ed4b491798403650af453a513714968cb: Status 404 returned error can't find the container with id 063c2b47dd891eb339042949cd035f9ed4b491798403650af453a513714968cb Nov 26 11:36:06 crc kubenswrapper[4622]: I1126 11:36:06.907965 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" event={"ID":"f9b52e72-9a14-4f7c-b704-2dd85b2f4568","Type":"ContainerStarted","Data":"063c2b47dd891eb339042949cd035f9ed4b491798403650af453a513714968cb"} Nov 26 11:36:07 crc kubenswrapper[4622]: I1126 11:36:07.919738 4622 generic.go:334] "Generic (PLEG): container finished" podID="a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37" containerID="46e659ee83c64131232b55247837d460a6d520ff7b7a58efecab8ffcf402ba67" exitCode=0 Nov 26 11:36:07 crc kubenswrapper[4622]: I1126 11:36:07.919846 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48w97" event={"ID":"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37","Type":"ContainerDied","Data":"46e659ee83c64131232b55247837d460a6d520ff7b7a58efecab8ffcf402ba67"} Nov 26 11:36:07 crc kubenswrapper[4622]: I1126 11:36:07.923717 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" event={"ID":"f9b52e72-9a14-4f7c-b704-2dd85b2f4568","Type":"ContainerStarted","Data":"736650ede3413ef0282995ac0b1dbaaa3dc8bebdf58e462ea7baa9b4214451ee"} Nov 26 11:36:07 crc kubenswrapper[4622]: I1126 11:36:07.965953 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" podStartSLOduration=2.464092553 podStartE2EDuration="2.965933006s" podCreationTimestamp="2025-11-26 11:36:05 +0000 UTC" firstStartedPulling="2025-11-26 11:36:06.772279947 +0000 UTC m=+1526.363491470" lastFinishedPulling="2025-11-26 11:36:07.274120402 +0000 UTC m=+1526.865331923" observedRunningTime="2025-11-26 11:36:07.963805411 +0000 UTC m=+1527.555016933" watchObservedRunningTime="2025-11-26 11:36:07.965933006 +0000 UTC m=+1527.557144527" Nov 26 11:36:08 crc kubenswrapper[4622]: I1126 11:36:08.938156 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48w97" event={"ID":"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37","Type":"ContainerStarted","Data":"01734e334042b7c788ad40c446ed9164d0b3d91090b848e891a8524faa632aee"} Nov 26 11:36:08 crc kubenswrapper[4622]: I1126 11:36:08.972134 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-48w97" podStartSLOduration=3.421325921 podStartE2EDuration="5.972109672s" podCreationTimestamp="2025-11-26 11:36:03 +0000 UTC" firstStartedPulling="2025-11-26 11:36:05.897535175 +0000 UTC m=+1525.488746697" lastFinishedPulling="2025-11-26 11:36:08.448318925 +0000 UTC m=+1528.039530448" 
observedRunningTime="2025-11-26 11:36:08.955993277 +0000 UTC m=+1528.547204799" watchObservedRunningTime="2025-11-26 11:36:08.972109672 +0000 UTC m=+1528.563321195" Nov 26 11:36:09 crc kubenswrapper[4622]: I1126 11:36:09.026409 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-zxx5f"] Nov 26 11:36:09 crc kubenswrapper[4622]: I1126 11:36:09.036377 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-zxx5f"] Nov 26 11:36:10 crc kubenswrapper[4622]: I1126 11:36:10.042774 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-89f8-account-create-update-5chl8"] Nov 26 11:36:10 crc kubenswrapper[4622]: I1126 11:36:10.050675 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-89f8-account-create-update-5chl8"] Nov 26 11:36:10 crc kubenswrapper[4622]: I1126 11:36:10.717165 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e816521-5607-43d2-89b6-d8643c61eb01" path="/var/lib/kubelet/pods/7e816521-5607-43d2-89b6-d8643c61eb01/volumes" Nov 26 11:36:10 crc kubenswrapper[4622]: I1126 11:36:10.718044 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8746b761-9144-452e-aa5c-4c4ca60b03ad" path="/var/lib/kubelet/pods/8746b761-9144-452e-aa5c-4c4ca60b03ad/volumes" Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.362604 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.363134 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.396966 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.645623 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-558cl"] Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.647841 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.657947 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-558cl"] Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.684916 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fftc\" (UniqueName: \"kubernetes.io/projected/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-kube-api-access-5fftc\") pod \"community-operators-558cl\" (UID: \"a716d81d-19d8-4a4e-b6aa-e562f262f7bf\") " pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.684953 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-utilities\") pod \"community-operators-558cl\" (UID: \"a716d81d-19d8-4a4e-b6aa-e562f262f7bf\") " pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.685100 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-catalog-content\") pod \"community-operators-558cl\" (UID: \"a716d81d-19d8-4a4e-b6aa-e562f262f7bf\") " pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.787010 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-catalog-content\") pod \"community-operators-558cl\" (UID: \"a716d81d-19d8-4a4e-b6aa-e562f262f7bf\") " pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.787491 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-catalog-content\") pod \"community-operators-558cl\" (UID: \"a716d81d-19d8-4a4e-b6aa-e562f262f7bf\") " pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.787851 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fftc\" (UniqueName: \"kubernetes.io/projected/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-kube-api-access-5fftc\") pod \"community-operators-558cl\" (UID: \"a716d81d-19d8-4a4e-b6aa-e562f262f7bf\") " pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.787883 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-utilities\") pod \"community-operators-558cl\" (UID: \"a716d81d-19d8-4a4e-b6aa-e562f262f7bf\") " pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.788462 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-utilities\") pod \"community-operators-558cl\" (UID: \"a716d81d-19d8-4a4e-b6aa-e562f262f7bf\") " pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.804356 4622 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5fftc\" (UniqueName: \"kubernetes.io/projected/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-kube-api-access-5fftc\") pod \"community-operators-558cl\" (UID: \"a716d81d-19d8-4a4e-b6aa-e562f262f7bf\") " pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:14 crc kubenswrapper[4622]: I1126 11:36:14.975462 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:15 crc kubenswrapper[4622]: I1126 11:36:15.064535 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:15 crc kubenswrapper[4622]: I1126 11:36:15.439294 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-558cl"] Nov 26 11:36:16 crc kubenswrapper[4622]: I1126 11:36:16.008431 4622 generic.go:334] "Generic (PLEG): container finished" podID="a716d81d-19d8-4a4e-b6aa-e562f262f7bf" containerID="9721d7b3f2b36f01bd46d3401c569096c509b8bb626b2f46980b12ffc14692bd" exitCode=0 Nov 26 11:36:16 crc kubenswrapper[4622]: I1126 11:36:16.008481 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-558cl" event={"ID":"a716d81d-19d8-4a4e-b6aa-e562f262f7bf","Type":"ContainerDied","Data":"9721d7b3f2b36f01bd46d3401c569096c509b8bb626b2f46980b12ffc14692bd"} Nov 26 11:36:16 crc kubenswrapper[4622]: I1126 11:36:16.008876 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-558cl" event={"ID":"a716d81d-19d8-4a4e-b6aa-e562f262f7bf","Type":"ContainerStarted","Data":"c9c14d157400771209116994f2dd7a85daa45d9b3034023a9212b21638215c51"} Nov 26 11:36:17 crc kubenswrapper[4622]: I1126 11:36:17.017995 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-558cl" event={"ID":"a716d81d-19d8-4a4e-b6aa-e562f262f7bf","Type":"ContainerStarted","Data":"323ec34f0e61f34e55cb187ca10946050e821d3b75920a5f67cba1423c1514f1"} Nov 26 11:36:17 crc kubenswrapper[4622]: I1126 11:36:17.430621 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-48w97"] Nov 26 11:36:17 crc kubenswrapper[4622]: I1126 11:36:17.430999 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-48w97" podUID="a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37" containerName="registry-server" containerID="cri-o://01734e334042b7c788ad40c446ed9164d0b3d91090b848e891a8524faa632aee" gracePeriod=2 Nov 26 11:36:17 crc kubenswrapper[4622]: I1126 11:36:17.848734 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:17 crc kubenswrapper[4622]: I1126 11:36:17.953932 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-utilities\") pod \"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37\" (UID: \"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37\") " Nov 26 11:36:17 crc kubenswrapper[4622]: I1126 11:36:17.954093 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9blg\" (UniqueName: \"kubernetes.io/projected/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-kube-api-access-l9blg\") pod \"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37\" (UID: \"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37\") " Nov 26 11:36:17 crc kubenswrapper[4622]: I1126 11:36:17.954334 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-catalog-content\") pod \"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37\" (UID: \"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37\") " Nov 26 11:36:17 crc kubenswrapper[4622]: I1126 11:36:17.955059 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-utilities" (OuterVolumeSpecName: "utilities") pod "a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37" (UID: "a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:36:17 crc kubenswrapper[4622]: I1126 11:36:17.959351 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-kube-api-access-l9blg" (OuterVolumeSpecName: "kube-api-access-l9blg") pod "a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37" (UID: "a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37"). InnerVolumeSpecName "kube-api-access-l9blg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:36:17 crc kubenswrapper[4622]: I1126 11:36:17.969888 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37" (UID: "a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.031667 4622 generic.go:334] "Generic (PLEG): container finished" podID="a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37" containerID="01734e334042b7c788ad40c446ed9164d0b3d91090b848e891a8524faa632aee" exitCode=0 Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.031721 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48w97" event={"ID":"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37","Type":"ContainerDied","Data":"01734e334042b7c788ad40c446ed9164d0b3d91090b848e891a8524faa632aee"} Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.032631 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48w97" event={"ID":"a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37","Type":"ContainerDied","Data":"ecfc7493044a823a2e5f400b262f08ebab4a230d518d895f1527f6a346dc09d7"} Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.031761 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-48w97" Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.032697 4622 scope.go:117] "RemoveContainer" containerID="01734e334042b7c788ad40c446ed9164d0b3d91090b848e891a8524faa632aee" Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.035488 4622 generic.go:334] "Generic (PLEG): container finished" podID="a716d81d-19d8-4a4e-b6aa-e562f262f7bf" containerID="323ec34f0e61f34e55cb187ca10946050e821d3b75920a5f67cba1423c1514f1" exitCode=0 Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.035548 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-558cl" event={"ID":"a716d81d-19d8-4a4e-b6aa-e562f262f7bf","Type":"ContainerDied","Data":"323ec34f0e61f34e55cb187ca10946050e821d3b75920a5f67cba1423c1514f1"} Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.056727 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.056813 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.057043 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9blg\" (UniqueName: \"kubernetes.io/projected/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37-kube-api-access-l9blg\") on node \"crc\" DevicePath \"\"" Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.063307 4622 scope.go:117] "RemoveContainer" containerID="46e659ee83c64131232b55247837d460a6d520ff7b7a58efecab8ffcf402ba67" Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.069545 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-48w97"] Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.077810 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-48w97"] Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.103161 4622 scope.go:117] "RemoveContainer" containerID="a02c8517d2b7b14dce8fa896ce36d079569589f8018b1fb8ec08b45c8e122a45" Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.133832 4622 scope.go:117] "RemoveContainer" containerID="01734e334042b7c788ad40c446ed9164d0b3d91090b848e891a8524faa632aee" Nov 26 11:36:18 crc kubenswrapper[4622]: E1126 11:36:18.134257 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01734e334042b7c788ad40c446ed9164d0b3d91090b848e891a8524faa632aee\": container with ID starting with 01734e334042b7c788ad40c446ed9164d0b3d91090b848e891a8524faa632aee not found: ID does not exist" containerID="01734e334042b7c788ad40c446ed9164d0b3d91090b848e891a8524faa632aee" Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.134292 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01734e334042b7c788ad40c446ed9164d0b3d91090b848e891a8524faa632aee"} err="failed to get container status \"01734e334042b7c788ad40c446ed9164d0b3d91090b848e891a8524faa632aee\": rpc error: code = NotFound desc = could not find container \"01734e334042b7c788ad40c446ed9164d0b3d91090b848e891a8524faa632aee\": container with ID starting with 01734e334042b7c788ad40c446ed9164d0b3d91090b848e891a8524faa632aee 
not found: ID does not exist" Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.134312 4622 scope.go:117] "RemoveContainer" containerID="46e659ee83c64131232b55247837d460a6d520ff7b7a58efecab8ffcf402ba67" Nov 26 11:36:18 crc kubenswrapper[4622]: E1126 11:36:18.134548 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46e659ee83c64131232b55247837d460a6d520ff7b7a58efecab8ffcf402ba67\": container with ID starting with 46e659ee83c64131232b55247837d460a6d520ff7b7a58efecab8ffcf402ba67 not found: ID does not exist" containerID="46e659ee83c64131232b55247837d460a6d520ff7b7a58efecab8ffcf402ba67" Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.134583 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46e659ee83c64131232b55247837d460a6d520ff7b7a58efecab8ffcf402ba67"} err="failed to get container status \"46e659ee83c64131232b55247837d460a6d520ff7b7a58efecab8ffcf402ba67\": rpc error: code = NotFound desc = could not find container \"46e659ee83c64131232b55247837d460a6d520ff7b7a58efecab8ffcf402ba67\": container with ID starting with 46e659ee83c64131232b55247837d460a6d520ff7b7a58efecab8ffcf402ba67 not found: ID does not exist" Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.134601 4622 scope.go:117] "RemoveContainer" containerID="a02c8517d2b7b14dce8fa896ce36d079569589f8018b1fb8ec08b45c8e122a45" Nov 26 11:36:18 crc kubenswrapper[4622]: E1126 11:36:18.134795 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a02c8517d2b7b14dce8fa896ce36d079569589f8018b1fb8ec08b45c8e122a45\": container with ID starting with a02c8517d2b7b14dce8fa896ce36d079569589f8018b1fb8ec08b45c8e122a45 not found: ID does not exist" containerID="a02c8517d2b7b14dce8fa896ce36d079569589f8018b1fb8ec08b45c8e122a45" Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.134817 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a02c8517d2b7b14dce8fa896ce36d079569589f8018b1fb8ec08b45c8e122a45"} err="failed to get container status \"a02c8517d2b7b14dce8fa896ce36d079569589f8018b1fb8ec08b45c8e122a45\": rpc error: code = NotFound desc = could not find container \"a02c8517d2b7b14dce8fa896ce36d079569589f8018b1fb8ec08b45c8e122a45\": container with ID starting with a02c8517d2b7b14dce8fa896ce36d079569589f8018b1fb8ec08b45c8e122a45 not found: ID does not exist" Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.706714 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:36:18 crc kubenswrapper[4622]: E1126 11:36:18.707060 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:36:18 crc kubenswrapper[4622]: I1126 11:36:18.719595 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37" path="/var/lib/kubelet/pods/a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37/volumes" Nov 26 11:36:19 crc kubenswrapper[4622]: I1126 11:36:19.051962 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-558cl" event={"ID":"a716d81d-19d8-4a4e-b6aa-e562f262f7bf","Type":"ContainerStarted","Data":"e9568137f292ba75dd9bc376cb324c70d732d0123047c2d3c03799dfe0f3008e"} Nov 26 11:36:19 crc kubenswrapper[4622]: I1126 11:36:19.075438 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-558cl" podStartSLOduration=3.123740798 podStartE2EDuration="5.07541931s" podCreationTimestamp="2025-11-26 11:36:14 +0000 UTC" firstStartedPulling="2025-11-26 11:36:16.010461256 +0000 UTC m=+1535.601672778" lastFinishedPulling="2025-11-26 11:36:17.962139769 +0000 UTC m=+1537.553351290" observedRunningTime="2025-11-26 11:36:19.067545414 +0000 UTC m=+1538.658756937" watchObservedRunningTime="2025-11-26 11:36:19.07541931 +0000 UTC m=+1538.666630822" Nov 26 11:36:24 crc kubenswrapper[4622]: I1126 11:36:24.976280 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:24 crc kubenswrapper[4622]: I1126 11:36:24.977015 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:25 crc kubenswrapper[4622]: I1126 11:36:25.014734 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:25 crc kubenswrapper[4622]: I1126 11:36:25.144338 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:25 crc kubenswrapper[4622]: I1126 11:36:25.253611 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-558cl"] Nov 26 11:36:27 crc kubenswrapper[4622]: I1126 11:36:27.129965 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-558cl" podUID="a716d81d-19d8-4a4e-b6aa-e562f262f7bf" containerName="registry-server" containerID="cri-o://e9568137f292ba75dd9bc376cb324c70d732d0123047c2d3c03799dfe0f3008e" gracePeriod=2 Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.034229 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.096958 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-utilities\") pod \"a716d81d-19d8-4a4e-b6aa-e562f262f7bf\" (UID: \"a716d81d-19d8-4a4e-b6aa-e562f262f7bf\") " Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.097111 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-catalog-content\") pod \"a716d81d-19d8-4a4e-b6aa-e562f262f7bf\" (UID: \"a716d81d-19d8-4a4e-b6aa-e562f262f7bf\") " Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.097178 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fftc\" (UniqueName: \"kubernetes.io/projected/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-kube-api-access-5fftc\") pod \"a716d81d-19d8-4a4e-b6aa-e562f262f7bf\" (UID: \"a716d81d-19d8-4a4e-b6aa-e562f262f7bf\") " Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.097693 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-utilities" (OuterVolumeSpecName: "utilities") pod "a716d81d-19d8-4a4e-b6aa-e562f262f7bf" (UID: "a716d81d-19d8-4a4e-b6aa-e562f262f7bf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.102433 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-kube-api-access-5fftc" (OuterVolumeSpecName: "kube-api-access-5fftc") pod "a716d81d-19d8-4a4e-b6aa-e562f262f7bf" (UID: "a716d81d-19d8-4a4e-b6aa-e562f262f7bf"). InnerVolumeSpecName "kube-api-access-5fftc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.137954 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a716d81d-19d8-4a4e-b6aa-e562f262f7bf" (UID: "a716d81d-19d8-4a4e-b6aa-e562f262f7bf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.141959 4622 generic.go:334] "Generic (PLEG): container finished" podID="a716d81d-19d8-4a4e-b6aa-e562f262f7bf" containerID="e9568137f292ba75dd9bc376cb324c70d732d0123047c2d3c03799dfe0f3008e" exitCode=0 Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.141998 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-558cl" event={"ID":"a716d81d-19d8-4a4e-b6aa-e562f262f7bf","Type":"ContainerDied","Data":"e9568137f292ba75dd9bc376cb324c70d732d0123047c2d3c03799dfe0f3008e"} Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.142027 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-558cl" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.142058 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-558cl" event={"ID":"a716d81d-19d8-4a4e-b6aa-e562f262f7bf","Type":"ContainerDied","Data":"c9c14d157400771209116994f2dd7a85daa45d9b3034023a9212b21638215c51"} Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.142077 4622 scope.go:117] "RemoveContainer" containerID="e9568137f292ba75dd9bc376cb324c70d732d0123047c2d3c03799dfe0f3008e" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.176396 4622 scope.go:117] "RemoveContainer" containerID="323ec34f0e61f34e55cb187ca10946050e821d3b75920a5f67cba1423c1514f1" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.181421 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-558cl"] Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.189974 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-558cl"] Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.200137 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fftc\" (UniqueName: \"kubernetes.io/projected/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-kube-api-access-5fftc\") on node \"crc\" DevicePath \"\"" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.200166 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.200178 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a716d81d-19d8-4a4e-b6aa-e562f262f7bf-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.219245 4622 scope.go:117] "RemoveContainer" containerID="9721d7b3f2b36f01bd46d3401c569096c509b8bb626b2f46980b12ffc14692bd" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.238872 4622 scope.go:117] "RemoveContainer" containerID="e9568137f292ba75dd9bc376cb324c70d732d0123047c2d3c03799dfe0f3008e" Nov 26 11:36:28 crc kubenswrapper[4622]: E1126 11:36:28.239315 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9568137f292ba75dd9bc376cb324c70d732d0123047c2d3c03799dfe0f3008e\": container with ID starting with e9568137f292ba75dd9bc376cb324c70d732d0123047c2d3c03799dfe0f3008e not found: ID does not exist" containerID="e9568137f292ba75dd9bc376cb324c70d732d0123047c2d3c03799dfe0f3008e" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.239377 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9568137f292ba75dd9bc376cb324c70d732d0123047c2d3c03799dfe0f3008e"} err="failed to get container status \"e9568137f292ba75dd9bc376cb324c70d732d0123047c2d3c03799dfe0f3008e\": rpc error: code = NotFound desc = could not find container \"e9568137f292ba75dd9bc376cb324c70d732d0123047c2d3c03799dfe0f3008e\": container with ID starting with e9568137f292ba75dd9bc376cb324c70d732d0123047c2d3c03799dfe0f3008e not found: ID does not exist" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.239406 4622 scope.go:117] "RemoveContainer" containerID="323ec34f0e61f34e55cb187ca10946050e821d3b75920a5f67cba1423c1514f1" Nov 26 11:36:28 crc kubenswrapper[4622]: 
E1126 11:36:28.239728 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"323ec34f0e61f34e55cb187ca10946050e821d3b75920a5f67cba1423c1514f1\": container with ID starting with 323ec34f0e61f34e55cb187ca10946050e821d3b75920a5f67cba1423c1514f1 not found: ID does not exist" containerID="323ec34f0e61f34e55cb187ca10946050e821d3b75920a5f67cba1423c1514f1" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.239756 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"323ec34f0e61f34e55cb187ca10946050e821d3b75920a5f67cba1423c1514f1"} err="failed to get container status \"323ec34f0e61f34e55cb187ca10946050e821d3b75920a5f67cba1423c1514f1\": rpc error: code = NotFound desc = could not find container \"323ec34f0e61f34e55cb187ca10946050e821d3b75920a5f67cba1423c1514f1\": container with ID starting with 323ec34f0e61f34e55cb187ca10946050e821d3b75920a5f67cba1423c1514f1 not found: ID does not exist" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.239774 4622 scope.go:117] "RemoveContainer" containerID="9721d7b3f2b36f01bd46d3401c569096c509b8bb626b2f46980b12ffc14692bd" Nov 26 11:36:28 crc kubenswrapper[4622]: E1126 11:36:28.240639 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9721d7b3f2b36f01bd46d3401c569096c509b8bb626b2f46980b12ffc14692bd\": container with ID starting with 9721d7b3f2b36f01bd46d3401c569096c509b8bb626b2f46980b12ffc14692bd not found: ID does not exist" containerID="9721d7b3f2b36f01bd46d3401c569096c509b8bb626b2f46980b12ffc14692bd" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.240689 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9721d7b3f2b36f01bd46d3401c569096c509b8bb626b2f46980b12ffc14692bd"} err="failed to get container status \"9721d7b3f2b36f01bd46d3401c569096c509b8bb626b2f46980b12ffc14692bd\": rpc error: code = NotFound desc = could not find container \"9721d7b3f2b36f01bd46d3401c569096c509b8bb626b2f46980b12ffc14692bd\": container with ID starting with 9721d7b3f2b36f01bd46d3401c569096c509b8bb626b2f46980b12ffc14692bd not found: ID does not exist" Nov 26 11:36:28 crc kubenswrapper[4622]: I1126 11:36:28.716406 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a716d81d-19d8-4a4e-b6aa-e562f262f7bf" path="/var/lib/kubelet/pods/a716d81d-19d8-4a4e-b6aa-e562f262f7bf/volumes" Nov 26 11:36:30 crc kubenswrapper[4622]: I1126 11:36:30.026630 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-hjhvx"] Nov 26 11:36:30 crc kubenswrapper[4622]: I1126 11:36:30.032976 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-hjhvx"] Nov 26 11:36:30 crc kubenswrapper[4622]: I1126 11:36:30.711989 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:36:30 crc kubenswrapper[4622]: E1126 11:36:30.713064 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:36:30 crc kubenswrapper[4622]: I1126 11:36:30.717004 
4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bbaef1d-2eba-4b11-a853-97b7af40b7e7" path="/var/lib/kubelet/pods/8bbaef1d-2eba-4b11-a853-97b7af40b7e7/volumes" Nov 26 11:36:31 crc kubenswrapper[4622]: I1126 11:36:31.032341 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-20d7-account-create-update-x2dsx"] Nov 26 11:36:31 crc kubenswrapper[4622]: I1126 11:36:31.038924 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-20d7-account-create-update-x2dsx"] Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.058944 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-4d48-account-create-update-bmtqm"] Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.070401 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-4dj9p"] Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.076054 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-6k9fh"] Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.083540 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-aa81-account-create-update-hlh9d"] Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.092197 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-dlkff"] Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.099389 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-4d48-account-create-update-bmtqm"] Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.104463 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-6k9fh"] Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.109407 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-aa81-account-create-update-hlh9d"] Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.114739 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-4dj9p"] Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.119665 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-dlkff"] Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.716003 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1058139e-d30c-419c-9819-91c5418144fa" path="/var/lib/kubelet/pods/1058139e-d30c-419c-9819-91c5418144fa/volumes" Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.717367 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c071132-6163-40db-92b0-121c2d362a69" path="/var/lib/kubelet/pods/2c071132-6163-40db-92b0-121c2d362a69/volumes" Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.718093 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="690c5cab-acaa-440f-803e-5c8ae6705486" path="/var/lib/kubelet/pods/690c5cab-acaa-440f-803e-5c8ae6705486/volumes" Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.718693 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a87d2790-eea1-4fb5-8f6c-fb4931e66b28" path="/var/lib/kubelet/pods/a87d2790-eea1-4fb5-8f6c-fb4931e66b28/volumes" Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.719745 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b537dfd7-4088-4292-bf23-aedf0e2bd0df" path="/var/lib/kubelet/pods/b537dfd7-4088-4292-bf23-aedf0e2bd0df/volumes" Nov 26 11:36:32 crc kubenswrapper[4622]: I1126 11:36:32.720259 4622 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="d731d848-aaea-425b-be07-5e852d58a45d" path="/var/lib/kubelet/pods/d731d848-aaea-425b-be07-5e852d58a45d/volumes" Nov 26 11:36:38 crc kubenswrapper[4622]: I1126 11:36:38.029105 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-mwptb"] Nov 26 11:36:38 crc kubenswrapper[4622]: I1126 11:36:38.038354 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-mwptb"] Nov 26 11:36:38 crc kubenswrapper[4622]: I1126 11:36:38.716268 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1451a45-9875-4a5e-b265-bd0c240cb628" path="/var/lib/kubelet/pods/d1451a45-9875-4a5e-b265-bd0c240cb628/volumes" Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.201021 4622 scope.go:117] "RemoveContainer" containerID="7f420817e41c1a228b0aeb8ce650ca646742a46240c9bec898326fbc92a98d49" Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.225549 4622 scope.go:117] "RemoveContainer" containerID="4a470a60b171d72d93568e421998add90ec1f5d61d1333577efdae7aabc3d365" Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.262718 4622 scope.go:117] "RemoveContainer" containerID="eb592874236e8fc40f5048a3226ede035fefc5bfddfcc7bbc05beaf76d4fe79b" Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.291248 4622 scope.go:117] "RemoveContainer" containerID="dc70b9e7db144cb00031158f11022f52078086c017a84a09c2bf0471931c5104" Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.308596 4622 generic.go:334] "Generic (PLEG): container finished" podID="f9b52e72-9a14-4f7c-b704-2dd85b2f4568" containerID="736650ede3413ef0282995ac0b1dbaaa3dc8bebdf58e462ea7baa9b4214451ee" exitCode=0 Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.308681 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" event={"ID":"f9b52e72-9a14-4f7c-b704-2dd85b2f4568","Type":"ContainerDied","Data":"736650ede3413ef0282995ac0b1dbaaa3dc8bebdf58e462ea7baa9b4214451ee"} Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.347627 4622 scope.go:117] "RemoveContainer" containerID="d4745f4ad95fab6ede096d0eaa7dcbe6208f708deb300d93b1cfc23866c1ecf1" Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.370274 4622 scope.go:117] "RemoveContainer" containerID="a7802c79a3f358342d9f07e9294e0f73b3b43d6578ca27404dd2d3c1e30be7c3" Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.402307 4622 scope.go:117] "RemoveContainer" containerID="e88e8776f1350ebc4e309ec3edf5a00865c3a95626d0ac0353f5d9a1ffe78c59" Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.435434 4622 scope.go:117] "RemoveContainer" containerID="f703df987afdc852d4ad06b2a0847989c84279ed13ea7f9c15d5872ccab10ab1" Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.456317 4622 scope.go:117] "RemoveContainer" containerID="ade24a4ee019823fdb8252a10bd41beee53da372a730b7350233dbb6656f0749" Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.476344 4622 scope.go:117] "RemoveContainer" containerID="56e79ca988efded5e0e4bc2b087d8f27e315c36ca34e49d8faee9a4a2cb0a2a9" Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.490460 4622 scope.go:117] "RemoveContainer" containerID="da0c22113970c5ace2726fb60821d5564da901faf8ce88a8881313486d7dc926" Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.508512 4622 scope.go:117] "RemoveContainer" containerID="d2d11cca6f9226bb774372212b8c86d6279ddf6b04da7c4946f55d5d7089a8cf" Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.525001 4622 scope.go:117] "RemoveContainer" 
containerID="395b2507a1545ee5fbec9681ab068fefbfcef724bd285b06e54a2480accf6f6e" Nov 26 11:36:43 crc kubenswrapper[4622]: I1126 11:36:43.541910 4622 scope.go:117] "RemoveContainer" containerID="c09a373ee7912a4866ad896ded228f0c616fc3dff7abd1b07e86ae9bd436ce67" Nov 26 11:36:44 crc kubenswrapper[4622]: I1126 11:36:44.685034 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" Nov 26 11:36:44 crc kubenswrapper[4622]: I1126 11:36:44.706429 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:36:44 crc kubenswrapper[4622]: E1126 11:36:44.706778 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:36:44 crc kubenswrapper[4622]: I1126 11:36:44.769581 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkzgp\" (UniqueName: \"kubernetes.io/projected/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-kube-api-access-tkzgp\") pod \"f9b52e72-9a14-4f7c-b704-2dd85b2f4568\" (UID: \"f9b52e72-9a14-4f7c-b704-2dd85b2f4568\") " Nov 26 11:36:44 crc kubenswrapper[4622]: I1126 11:36:44.775252 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-kube-api-access-tkzgp" (OuterVolumeSpecName: "kube-api-access-tkzgp") pod "f9b52e72-9a14-4f7c-b704-2dd85b2f4568" (UID: "f9b52e72-9a14-4f7c-b704-2dd85b2f4568"). InnerVolumeSpecName "kube-api-access-tkzgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:36:44 crc kubenswrapper[4622]: I1126 11:36:44.874495 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-inventory\") pod \"f9b52e72-9a14-4f7c-b704-2dd85b2f4568\" (UID: \"f9b52e72-9a14-4f7c-b704-2dd85b2f4568\") " Nov 26 11:36:44 crc kubenswrapper[4622]: I1126 11:36:44.874603 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-ssh-key\") pod \"f9b52e72-9a14-4f7c-b704-2dd85b2f4568\" (UID: \"f9b52e72-9a14-4f7c-b704-2dd85b2f4568\") " Nov 26 11:36:44 crc kubenswrapper[4622]: I1126 11:36:44.875520 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkzgp\" (UniqueName: \"kubernetes.io/projected/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-kube-api-access-tkzgp\") on node \"crc\" DevicePath \"\"" Nov 26 11:36:44 crc kubenswrapper[4622]: I1126 11:36:44.896896 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-inventory" (OuterVolumeSpecName: "inventory") pod "f9b52e72-9a14-4f7c-b704-2dd85b2f4568" (UID: "f9b52e72-9a14-4f7c-b704-2dd85b2f4568"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:36:44 crc kubenswrapper[4622]: I1126 11:36:44.897399 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f9b52e72-9a14-4f7c-b704-2dd85b2f4568" (UID: "f9b52e72-9a14-4f7c-b704-2dd85b2f4568"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:36:44 crc kubenswrapper[4622]: I1126 11:36:44.977935 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:36:44 crc kubenswrapper[4622]: I1126 11:36:44.978267 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b52e72-9a14-4f7c-b704-2dd85b2f4568-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.336909 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" event={"ID":"f9b52e72-9a14-4f7c-b704-2dd85b2f4568","Type":"ContainerDied","Data":"063c2b47dd891eb339042949cd035f9ed4b491798403650af453a513714968cb"} Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.336961 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="063c2b47dd891eb339042949cd035f9ed4b491798403650af453a513714968cb" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.336963 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.413203 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-fkf6t"] Nov 26 11:36:45 crc kubenswrapper[4622]: E1126 11:36:45.413548 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37" containerName="extract-content" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.413567 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37" containerName="extract-content" Nov 26 11:36:45 crc kubenswrapper[4622]: E1126 11:36:45.413578 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37" containerName="registry-server" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.413585 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37" containerName="registry-server" Nov 26 11:36:45 crc kubenswrapper[4622]: E1126 11:36:45.413601 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37" containerName="extract-utilities" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.413606 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37" containerName="extract-utilities" Nov 26 11:36:45 crc kubenswrapper[4622]: E1126 11:36:45.413617 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a716d81d-19d8-4a4e-b6aa-e562f262f7bf" containerName="registry-server" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.413623 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a716d81d-19d8-4a4e-b6aa-e562f262f7bf" containerName="registry-server" Nov 26 11:36:45 crc kubenswrapper[4622]: E1126 
11:36:45.413633 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a716d81d-19d8-4a4e-b6aa-e562f262f7bf" containerName="extract-content" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.413639 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a716d81d-19d8-4a4e-b6aa-e562f262f7bf" containerName="extract-content" Nov 26 11:36:45 crc kubenswrapper[4622]: E1126 11:36:45.413658 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a716d81d-19d8-4a4e-b6aa-e562f262f7bf" containerName="extract-utilities" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.413664 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a716d81d-19d8-4a4e-b6aa-e562f262f7bf" containerName="extract-utilities" Nov 26 11:36:45 crc kubenswrapper[4622]: E1126 11:36:45.413673 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9b52e72-9a14-4f7c-b704-2dd85b2f4568" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.413679 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9b52e72-9a14-4f7c-b704-2dd85b2f4568" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.413844 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="a716d81d-19d8-4a4e-b6aa-e562f262f7bf" containerName="registry-server" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.413856 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8d07ac2-2ea0-47d3-9f8d-21502dbdfb37" containerName="registry-server" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.413862 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9b52e72-9a14-4f7c-b704-2dd85b2f4568" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.414384 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.416305 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.416482 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.416566 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.416677 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.426912 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-fkf6t"] Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.491221 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/829ddce0-8433-4217-b8a4-945579a8421c-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-fkf6t\" (UID: \"829ddce0-8433-4217-b8a4-945579a8421c\") " pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.491264 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zfrm\" (UniqueName: \"kubernetes.io/projected/829ddce0-8433-4217-b8a4-945579a8421c-kube-api-access-6zfrm\") pod \"ssh-known-hosts-edpm-deployment-fkf6t\" (UID: \"829ddce0-8433-4217-b8a4-945579a8421c\") " pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.491396 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/829ddce0-8433-4217-b8a4-945579a8421c-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-fkf6t\" (UID: \"829ddce0-8433-4217-b8a4-945579a8421c\") " pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.594600 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/829ddce0-8433-4217-b8a4-945579a8421c-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-fkf6t\" (UID: \"829ddce0-8433-4217-b8a4-945579a8421c\") " pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.594762 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/829ddce0-8433-4217-b8a4-945579a8421c-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-fkf6t\" (UID: \"829ddce0-8433-4217-b8a4-945579a8421c\") " pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.594839 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zfrm\" (UniqueName: \"kubernetes.io/projected/829ddce0-8433-4217-b8a4-945579a8421c-kube-api-access-6zfrm\") pod \"ssh-known-hosts-edpm-deployment-fkf6t\" (UID: \"829ddce0-8433-4217-b8a4-945579a8421c\") " pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" Nov 26 11:36:45 crc 
kubenswrapper[4622]: I1126 11:36:45.599083 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/829ddce0-8433-4217-b8a4-945579a8421c-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-fkf6t\" (UID: \"829ddce0-8433-4217-b8a4-945579a8421c\") " pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.600145 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/829ddce0-8433-4217-b8a4-945579a8421c-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-fkf6t\" (UID: \"829ddce0-8433-4217-b8a4-945579a8421c\") " pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.610889 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zfrm\" (UniqueName: \"kubernetes.io/projected/829ddce0-8433-4217-b8a4-945579a8421c-kube-api-access-6zfrm\") pod \"ssh-known-hosts-edpm-deployment-fkf6t\" (UID: \"829ddce0-8433-4217-b8a4-945579a8421c\") " pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" Nov 26 11:36:45 crc kubenswrapper[4622]: I1126 11:36:45.736510 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" Nov 26 11:36:46 crc kubenswrapper[4622]: I1126 11:36:46.202402 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-fkf6t"] Nov 26 11:36:46 crc kubenswrapper[4622]: I1126 11:36:46.349535 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" event={"ID":"829ddce0-8433-4217-b8a4-945579a8421c","Type":"ContainerStarted","Data":"487b65ef5e12b23ae584766cae5b93532c4310cd9c9eb432a0013fb780bf54d4"} Nov 26 11:36:47 crc kubenswrapper[4622]: I1126 11:36:47.359763 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" event={"ID":"829ddce0-8433-4217-b8a4-945579a8421c","Type":"ContainerStarted","Data":"fdf1cc346dc50303917192d6fe434050dd4718bc0d9384983590223776bf9293"} Nov 26 11:36:47 crc kubenswrapper[4622]: I1126 11:36:47.376990 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" podStartSLOduration=1.40969697 podStartE2EDuration="2.37697052s" podCreationTimestamp="2025-11-26 11:36:45 +0000 UTC" firstStartedPulling="2025-11-26 11:36:46.206936431 +0000 UTC m=+1565.798147954" lastFinishedPulling="2025-11-26 11:36:47.174209982 +0000 UTC m=+1566.765421504" observedRunningTime="2025-11-26 11:36:47.373583279 +0000 UTC m=+1566.964794801" watchObservedRunningTime="2025-11-26 11:36:47.37697052 +0000 UTC m=+1566.968182041" Nov 26 11:36:52 crc kubenswrapper[4622]: I1126 11:36:52.408547 4622 generic.go:334] "Generic (PLEG): container finished" podID="829ddce0-8433-4217-b8a4-945579a8421c" containerID="fdf1cc346dc50303917192d6fe434050dd4718bc0d9384983590223776bf9293" exitCode=0 Nov 26 11:36:52 crc kubenswrapper[4622]: I1126 11:36:52.408552 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" event={"ID":"829ddce0-8433-4217-b8a4-945579a8421c","Type":"ContainerDied","Data":"fdf1cc346dc50303917192d6fe434050dd4718bc0d9384983590223776bf9293"} Nov 26 11:36:53 crc kubenswrapper[4622]: I1126 11:36:53.815639 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" Nov 26 11:36:53 crc kubenswrapper[4622]: I1126 11:36:53.881359 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/829ddce0-8433-4217-b8a4-945579a8421c-inventory-0\") pod \"829ddce0-8433-4217-b8a4-945579a8421c\" (UID: \"829ddce0-8433-4217-b8a4-945579a8421c\") " Nov 26 11:36:53 crc kubenswrapper[4622]: I1126 11:36:53.881550 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/829ddce0-8433-4217-b8a4-945579a8421c-ssh-key-openstack-edpm-ipam\") pod \"829ddce0-8433-4217-b8a4-945579a8421c\" (UID: \"829ddce0-8433-4217-b8a4-945579a8421c\") " Nov 26 11:36:53 crc kubenswrapper[4622]: I1126 11:36:53.881753 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zfrm\" (UniqueName: \"kubernetes.io/projected/829ddce0-8433-4217-b8a4-945579a8421c-kube-api-access-6zfrm\") pod \"829ddce0-8433-4217-b8a4-945579a8421c\" (UID: \"829ddce0-8433-4217-b8a4-945579a8421c\") " Nov 26 11:36:53 crc kubenswrapper[4622]: I1126 11:36:53.887939 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/829ddce0-8433-4217-b8a4-945579a8421c-kube-api-access-6zfrm" (OuterVolumeSpecName: "kube-api-access-6zfrm") pod "829ddce0-8433-4217-b8a4-945579a8421c" (UID: "829ddce0-8433-4217-b8a4-945579a8421c"). InnerVolumeSpecName "kube-api-access-6zfrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:36:53 crc kubenswrapper[4622]: I1126 11:36:53.908906 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/829ddce0-8433-4217-b8a4-945579a8421c-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "829ddce0-8433-4217-b8a4-945579a8421c" (UID: "829ddce0-8433-4217-b8a4-945579a8421c"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:36:53 crc kubenswrapper[4622]: I1126 11:36:53.910517 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/829ddce0-8433-4217-b8a4-945579a8421c-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "829ddce0-8433-4217-b8a4-945579a8421c" (UID: "829ddce0-8433-4217-b8a4-945579a8421c"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:36:53 crc kubenswrapper[4622]: I1126 11:36:53.985909 4622 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/829ddce0-8433-4217-b8a4-945579a8421c-inventory-0\") on node \"crc\" DevicePath \"\"" Nov 26 11:36:53 crc kubenswrapper[4622]: I1126 11:36:53.985939 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/829ddce0-8433-4217-b8a4-945579a8421c-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Nov 26 11:36:53 crc kubenswrapper[4622]: I1126 11:36:53.985951 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zfrm\" (UniqueName: \"kubernetes.io/projected/829ddce0-8433-4217-b8a4-945579a8421c-kube-api-access-6zfrm\") on node \"crc\" DevicePath \"\"" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.432316 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" event={"ID":"829ddce0-8433-4217-b8a4-945579a8421c","Type":"ContainerDied","Data":"487b65ef5e12b23ae584766cae5b93532c4310cd9c9eb432a0013fb780bf54d4"} Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.432693 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="487b65ef5e12b23ae584766cae5b93532c4310cd9c9eb432a0013fb780bf54d4" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.432435 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-fkf6t" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.486038 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb"] Nov 26 11:36:54 crc kubenswrapper[4622]: E1126 11:36:54.486452 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="829ddce0-8433-4217-b8a4-945579a8421c" containerName="ssh-known-hosts-edpm-deployment" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.486471 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="829ddce0-8433-4217-b8a4-945579a8421c" containerName="ssh-known-hosts-edpm-deployment" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.486682 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="829ddce0-8433-4217-b8a4-945579a8421c" containerName="ssh-known-hosts-edpm-deployment" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.487272 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.488815 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.490387 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.490613 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.493994 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.494979 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb"] Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.597951 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/07d76be6-49bc-4d2d-b062-726853c7c6e1-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ff7zb\" (UID: \"07d76be6-49bc-4d2d-b062-726853c7c6e1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.598152 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrzkz\" (UniqueName: \"kubernetes.io/projected/07d76be6-49bc-4d2d-b062-726853c7c6e1-kube-api-access-nrzkz\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ff7zb\" (UID: \"07d76be6-49bc-4d2d-b062-726853c7c6e1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.598215 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07d76be6-49bc-4d2d-b062-726853c7c6e1-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ff7zb\" (UID: \"07d76be6-49bc-4d2d-b062-726853c7c6e1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.700595 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrzkz\" (UniqueName: \"kubernetes.io/projected/07d76be6-49bc-4d2d-b062-726853c7c6e1-kube-api-access-nrzkz\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ff7zb\" (UID: \"07d76be6-49bc-4d2d-b062-726853c7c6e1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.700743 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07d76be6-49bc-4d2d-b062-726853c7c6e1-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ff7zb\" (UID: \"07d76be6-49bc-4d2d-b062-726853c7c6e1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.700818 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/07d76be6-49bc-4d2d-b062-726853c7c6e1-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ff7zb\" (UID: \"07d76be6-49bc-4d2d-b062-726853c7c6e1\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.705047 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/07d76be6-49bc-4d2d-b062-726853c7c6e1-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ff7zb\" (UID: \"07d76be6-49bc-4d2d-b062-726853c7c6e1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.705247 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07d76be6-49bc-4d2d-b062-726853c7c6e1-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ff7zb\" (UID: \"07d76be6-49bc-4d2d-b062-726853c7c6e1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.715769 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrzkz\" (UniqueName: \"kubernetes.io/projected/07d76be6-49bc-4d2d-b062-726853c7c6e1-kube-api-access-nrzkz\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ff7zb\" (UID: \"07d76be6-49bc-4d2d-b062-726853c7c6e1\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" Nov 26 11:36:54 crc kubenswrapper[4622]: I1126 11:36:54.813386 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" Nov 26 11:36:55 crc kubenswrapper[4622]: I1126 11:36:55.045433 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-kfrqh"] Nov 26 11:36:55 crc kubenswrapper[4622]: I1126 11:36:55.053160 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-kfrqh"] Nov 26 11:36:55 crc kubenswrapper[4622]: I1126 11:36:55.306943 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb"] Nov 26 11:36:55 crc kubenswrapper[4622]: I1126 11:36:55.445166 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" event={"ID":"07d76be6-49bc-4d2d-b062-726853c7c6e1","Type":"ContainerStarted","Data":"8120aa07933c0de3fdaa63bd1033bf76982cb154ad8f495cf7496419c7e116c6"} Nov 26 11:36:56 crc kubenswrapper[4622]: I1126 11:36:56.457804 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" event={"ID":"07d76be6-49bc-4d2d-b062-726853c7c6e1","Type":"ContainerStarted","Data":"066d67ce7b7206804751cc96fca97453836ded6ee87038b977ae28c1329bac73"} Nov 26 11:36:56 crc kubenswrapper[4622]: I1126 11:36:56.476811 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" podStartSLOduration=1.796106241 podStartE2EDuration="2.47678562s" podCreationTimestamp="2025-11-26 11:36:54 +0000 UTC" firstStartedPulling="2025-11-26 11:36:55.308801951 +0000 UTC m=+1574.900013473" lastFinishedPulling="2025-11-26 11:36:55.98948133 +0000 UTC m=+1575.580692852" observedRunningTime="2025-11-26 11:36:56.475543757 +0000 UTC m=+1576.066755279" watchObservedRunningTime="2025-11-26 11:36:56.47678562 +0000 UTC m=+1576.067997142" Nov 26 11:36:56 crc kubenswrapper[4622]: I1126 11:36:56.706996 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:36:56 crc kubenswrapper[4622]: E1126 
11:36:56.707585 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:36:56 crc kubenswrapper[4622]: I1126 11:36:56.715143 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="351c2fe8-f231-428a-b0bf-bc8642091b55" path="/var/lib/kubelet/pods/351c2fe8-f231-428a-b0bf-bc8642091b55/volumes" Nov 26 11:37:02 crc kubenswrapper[4622]: I1126 11:37:02.518942 4622 generic.go:334] "Generic (PLEG): container finished" podID="07d76be6-49bc-4d2d-b062-726853c7c6e1" containerID="066d67ce7b7206804751cc96fca97453836ded6ee87038b977ae28c1329bac73" exitCode=0 Nov 26 11:37:02 crc kubenswrapper[4622]: I1126 11:37:02.519042 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" event={"ID":"07d76be6-49bc-4d2d-b062-726853c7c6e1","Type":"ContainerDied","Data":"066d67ce7b7206804751cc96fca97453836ded6ee87038b977ae28c1329bac73"} Nov 26 11:37:03 crc kubenswrapper[4622]: I1126 11:37:03.891873 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.011935 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/07d76be6-49bc-4d2d-b062-726853c7c6e1-ssh-key\") pod \"07d76be6-49bc-4d2d-b062-726853c7c6e1\" (UID: \"07d76be6-49bc-4d2d-b062-726853c7c6e1\") " Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.012749 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrzkz\" (UniqueName: \"kubernetes.io/projected/07d76be6-49bc-4d2d-b062-726853c7c6e1-kube-api-access-nrzkz\") pod \"07d76be6-49bc-4d2d-b062-726853c7c6e1\" (UID: \"07d76be6-49bc-4d2d-b062-726853c7c6e1\") " Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.012960 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07d76be6-49bc-4d2d-b062-726853c7c6e1-inventory\") pod \"07d76be6-49bc-4d2d-b062-726853c7c6e1\" (UID: \"07d76be6-49bc-4d2d-b062-726853c7c6e1\") " Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.027275 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07d76be6-49bc-4d2d-b062-726853c7c6e1-kube-api-access-nrzkz" (OuterVolumeSpecName: "kube-api-access-nrzkz") pod "07d76be6-49bc-4d2d-b062-726853c7c6e1" (UID: "07d76be6-49bc-4d2d-b062-726853c7c6e1"). InnerVolumeSpecName "kube-api-access-nrzkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.038720 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07d76be6-49bc-4d2d-b062-726853c7c6e1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "07d76be6-49bc-4d2d-b062-726853c7c6e1" (UID: "07d76be6-49bc-4d2d-b062-726853c7c6e1"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.040440 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07d76be6-49bc-4d2d-b062-726853c7c6e1-inventory" (OuterVolumeSpecName: "inventory") pod "07d76be6-49bc-4d2d-b062-726853c7c6e1" (UID: "07d76be6-49bc-4d2d-b062-726853c7c6e1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.118038 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/07d76be6-49bc-4d2d-b062-726853c7c6e1-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.118114 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrzkz\" (UniqueName: \"kubernetes.io/projected/07d76be6-49bc-4d2d-b062-726853c7c6e1-kube-api-access-nrzkz\") on node \"crc\" DevicePath \"\"" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.118137 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07d76be6-49bc-4d2d-b062-726853c7c6e1-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.551641 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" event={"ID":"07d76be6-49bc-4d2d-b062-726853c7c6e1","Type":"ContainerDied","Data":"8120aa07933c0de3fdaa63bd1033bf76982cb154ad8f495cf7496419c7e116c6"} Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.551682 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8120aa07933c0de3fdaa63bd1033bf76982cb154ad8f495cf7496419c7e116c6" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.551745 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.598052 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml"] Nov 26 11:37:04 crc kubenswrapper[4622]: E1126 11:37:04.598421 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07d76be6-49bc-4d2d-b062-726853c7c6e1" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.598441 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="07d76be6-49bc-4d2d-b062-726853c7c6e1" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.598613 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="07d76be6-49bc-4d2d-b062-726853c7c6e1" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.599226 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.601430 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.601598 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.601599 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.602733 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.604404 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml"] Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.729542 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c258498-1776-4080-90ac-a37243370dfe-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml\" (UID: \"2c258498-1776-4080-90ac-a37243370dfe\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.729631 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c258498-1776-4080-90ac-a37243370dfe-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml\" (UID: \"2c258498-1776-4080-90ac-a37243370dfe\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.729719 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmskq\" (UniqueName: \"kubernetes.io/projected/2c258498-1776-4080-90ac-a37243370dfe-kube-api-access-mmskq\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml\" (UID: \"2c258498-1776-4080-90ac-a37243370dfe\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.832135 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmskq\" (UniqueName: \"kubernetes.io/projected/2c258498-1776-4080-90ac-a37243370dfe-kube-api-access-mmskq\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml\" (UID: \"2c258498-1776-4080-90ac-a37243370dfe\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.832388 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c258498-1776-4080-90ac-a37243370dfe-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml\" (UID: \"2c258498-1776-4080-90ac-a37243370dfe\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.832493 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c258498-1776-4080-90ac-a37243370dfe-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml\" (UID: 
\"2c258498-1776-4080-90ac-a37243370dfe\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.836352 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c258498-1776-4080-90ac-a37243370dfe-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml\" (UID: \"2c258498-1776-4080-90ac-a37243370dfe\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.836459 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c258498-1776-4080-90ac-a37243370dfe-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml\" (UID: \"2c258498-1776-4080-90ac-a37243370dfe\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.848275 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmskq\" (UniqueName: \"kubernetes.io/projected/2c258498-1776-4080-90ac-a37243370dfe-kube-api-access-mmskq\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml\" (UID: \"2c258498-1776-4080-90ac-a37243370dfe\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" Nov 26 11:37:04 crc kubenswrapper[4622]: I1126 11:37:04.919456 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" Nov 26 11:37:05 crc kubenswrapper[4622]: I1126 11:37:05.407496 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml"] Nov 26 11:37:05 crc kubenswrapper[4622]: I1126 11:37:05.561205 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" event={"ID":"2c258498-1776-4080-90ac-a37243370dfe","Type":"ContainerStarted","Data":"3c96af07be9c43613ab4dd4904ce5912d40d59c72375417e2c59ed470d6f6646"} Nov 26 11:37:06 crc kubenswrapper[4622]: I1126 11:37:06.572266 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" event={"ID":"2c258498-1776-4080-90ac-a37243370dfe","Type":"ContainerStarted","Data":"1d7fd813cdd074221cff9d9c6f4cf45662257476bb786721ebde10c148ca6c10"} Nov 26 11:37:09 crc kubenswrapper[4622]: I1126 11:37:09.032095 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" podStartSLOduration=4.423890473 podStartE2EDuration="5.03206428s" podCreationTimestamp="2025-11-26 11:37:04 +0000 UTC" firstStartedPulling="2025-11-26 11:37:05.413836492 +0000 UTC m=+1585.005048015" lastFinishedPulling="2025-11-26 11:37:06.0220103 +0000 UTC m=+1585.613221822" observedRunningTime="2025-11-26 11:37:06.586878856 +0000 UTC m=+1586.178090378" watchObservedRunningTime="2025-11-26 11:37:09.03206428 +0000 UTC m=+1588.623275803" Nov 26 11:37:09 crc kubenswrapper[4622]: I1126 11:37:09.037274 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-s866n"] Nov 26 11:37:09 crc kubenswrapper[4622]: I1126 11:37:09.048308 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-s866n"] Nov 26 11:37:10 crc kubenswrapper[4622]: I1126 11:37:10.718770 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="18f1d84a-d71d-4c14-8d60-7e3dc4061fb1" path="/var/lib/kubelet/pods/18f1d84a-d71d-4c14-8d60-7e3dc4061fb1/volumes" Nov 26 11:37:11 crc kubenswrapper[4622]: I1126 11:37:11.706115 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:37:11 crc kubenswrapper[4622]: E1126 11:37:11.706664 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:37:13 crc kubenswrapper[4622]: I1126 11:37:13.653515 4622 generic.go:334] "Generic (PLEG): container finished" podID="2c258498-1776-4080-90ac-a37243370dfe" containerID="1d7fd813cdd074221cff9d9c6f4cf45662257476bb786721ebde10c148ca6c10" exitCode=0 Nov 26 11:37:13 crc kubenswrapper[4622]: I1126 11:37:13.653576 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" event={"ID":"2c258498-1776-4080-90ac-a37243370dfe","Type":"ContainerDied","Data":"1d7fd813cdd074221cff9d9c6f4cf45662257476bb786721ebde10c148ca6c10"} Nov 26 11:37:15 crc kubenswrapper[4622]: I1126 11:37:15.051619 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-2mgt7"] Nov 26 11:37:15 crc kubenswrapper[4622]: I1126 11:37:15.062914 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-2mgt7"] Nov 26 11:37:15 crc kubenswrapper[4622]: I1126 11:37:15.113760 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" Nov 26 11:37:15 crc kubenswrapper[4622]: I1126 11:37:15.167631 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmskq\" (UniqueName: \"kubernetes.io/projected/2c258498-1776-4080-90ac-a37243370dfe-kube-api-access-mmskq\") pod \"2c258498-1776-4080-90ac-a37243370dfe\" (UID: \"2c258498-1776-4080-90ac-a37243370dfe\") " Nov 26 11:37:15 crc kubenswrapper[4622]: I1126 11:37:15.167873 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c258498-1776-4080-90ac-a37243370dfe-ssh-key\") pod \"2c258498-1776-4080-90ac-a37243370dfe\" (UID: \"2c258498-1776-4080-90ac-a37243370dfe\") " Nov 26 11:37:15 crc kubenswrapper[4622]: I1126 11:37:15.168087 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c258498-1776-4080-90ac-a37243370dfe-inventory\") pod \"2c258498-1776-4080-90ac-a37243370dfe\" (UID: \"2c258498-1776-4080-90ac-a37243370dfe\") " Nov 26 11:37:15 crc kubenswrapper[4622]: I1126 11:37:15.174078 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c258498-1776-4080-90ac-a37243370dfe-kube-api-access-mmskq" (OuterVolumeSpecName: "kube-api-access-mmskq") pod "2c258498-1776-4080-90ac-a37243370dfe" (UID: "2c258498-1776-4080-90ac-a37243370dfe"). InnerVolumeSpecName "kube-api-access-mmskq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:37:15 crc kubenswrapper[4622]: I1126 11:37:15.190798 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c258498-1776-4080-90ac-a37243370dfe-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2c258498-1776-4080-90ac-a37243370dfe" (UID: "2c258498-1776-4080-90ac-a37243370dfe"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:37:15 crc kubenswrapper[4622]: I1126 11:37:15.191169 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c258498-1776-4080-90ac-a37243370dfe-inventory" (OuterVolumeSpecName: "inventory") pod "2c258498-1776-4080-90ac-a37243370dfe" (UID: "2c258498-1776-4080-90ac-a37243370dfe"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:37:15 crc kubenswrapper[4622]: I1126 11:37:15.272673 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c258498-1776-4080-90ac-a37243370dfe-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:37:15 crc kubenswrapper[4622]: I1126 11:37:15.272713 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmskq\" (UniqueName: \"kubernetes.io/projected/2c258498-1776-4080-90ac-a37243370dfe-kube-api-access-mmskq\") on node \"crc\" DevicePath \"\"" Nov 26 11:37:15 crc kubenswrapper[4622]: I1126 11:37:15.272726 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c258498-1776-4080-90ac-a37243370dfe-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:37:15 crc kubenswrapper[4622]: I1126 11:37:15.669906 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" event={"ID":"2c258498-1776-4080-90ac-a37243370dfe","Type":"ContainerDied","Data":"3c96af07be9c43613ab4dd4904ce5912d40d59c72375417e2c59ed470d6f6646"} Nov 26 11:37:15 crc kubenswrapper[4622]: I1126 11:37:15.669981 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c96af07be9c43613ab4dd4904ce5912d40d59c72375417e2c59ed470d6f6646" Nov 26 11:37:15 crc kubenswrapper[4622]: I1126 11:37:15.669959 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml" Nov 26 11:37:16 crc kubenswrapper[4622]: I1126 11:37:16.716767 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bd2cbb6-f985-402e-845c-ea4f8e2f970e" path="/var/lib/kubelet/pods/6bd2cbb6-f985-402e-845c-ea4f8e2f970e/volumes" Nov 26 11:37:22 crc kubenswrapper[4622]: I1126 11:37:22.031351 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-j8fgr"] Nov 26 11:37:22 crc kubenswrapper[4622]: I1126 11:37:22.040225 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-j8fgr"] Nov 26 11:37:22 crc kubenswrapper[4622]: I1126 11:37:22.707278 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:37:22 crc kubenswrapper[4622]: E1126 11:37:22.709317 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:37:22 crc kubenswrapper[4622]: I1126 11:37:22.718991 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99de607d-316d-4435-b18a-c6eeb950da19" path="/var/lib/kubelet/pods/99de607d-316d-4435-b18a-c6eeb950da19/volumes" Nov 26 11:37:24 crc kubenswrapper[4622]: I1126 11:37:24.029624 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-sq2s5"] Nov 26 11:37:24 crc kubenswrapper[4622]: I1126 11:37:24.037565 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-sq2s5"] Nov 26 11:37:24 crc kubenswrapper[4622]: I1126 11:37:24.714062 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57b0b68f-f25e-417e-ae0f-55d2361b8df6" path="/var/lib/kubelet/pods/57b0b68f-f25e-417e-ae0f-55d2361b8df6/volumes" Nov 26 11:37:33 crc kubenswrapper[4622]: I1126 11:37:33.706591 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:37:33 crc kubenswrapper[4622]: E1126 11:37:33.708125 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.193680 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qtnkm"] Nov 26 11:37:37 crc kubenswrapper[4622]: E1126 11:37:37.194396 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c258498-1776-4080-90ac-a37243370dfe" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.194414 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c258498-1776-4080-90ac-a37243370dfe" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.194650 4622 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="2c258498-1776-4080-90ac-a37243370dfe" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.196056 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.205700 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qtnkm"] Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.351294 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b806661-8644-4c35-845e-2e7ef910cf91-utilities\") pod \"redhat-operators-qtnkm\" (UID: \"4b806661-8644-4c35-845e-2e7ef910cf91\") " pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.351366 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hz7kk\" (UniqueName: \"kubernetes.io/projected/4b806661-8644-4c35-845e-2e7ef910cf91-kube-api-access-hz7kk\") pod \"redhat-operators-qtnkm\" (UID: \"4b806661-8644-4c35-845e-2e7ef910cf91\") " pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.351764 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b806661-8644-4c35-845e-2e7ef910cf91-catalog-content\") pod \"redhat-operators-qtnkm\" (UID: \"4b806661-8644-4c35-845e-2e7ef910cf91\") " pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.454587 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b806661-8644-4c35-845e-2e7ef910cf91-catalog-content\") pod \"redhat-operators-qtnkm\" (UID: \"4b806661-8644-4c35-845e-2e7ef910cf91\") " pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.454665 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b806661-8644-4c35-845e-2e7ef910cf91-utilities\") pod \"redhat-operators-qtnkm\" (UID: \"4b806661-8644-4c35-845e-2e7ef910cf91\") " pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.454696 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hz7kk\" (UniqueName: \"kubernetes.io/projected/4b806661-8644-4c35-845e-2e7ef910cf91-kube-api-access-hz7kk\") pod \"redhat-operators-qtnkm\" (UID: \"4b806661-8644-4c35-845e-2e7ef910cf91\") " pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.455390 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b806661-8644-4c35-845e-2e7ef910cf91-catalog-content\") pod \"redhat-operators-qtnkm\" (UID: \"4b806661-8644-4c35-845e-2e7ef910cf91\") " pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.455481 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b806661-8644-4c35-845e-2e7ef910cf91-utilities\") pod \"redhat-operators-qtnkm\" (UID: 
\"4b806661-8644-4c35-845e-2e7ef910cf91\") " pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.474785 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hz7kk\" (UniqueName: \"kubernetes.io/projected/4b806661-8644-4c35-845e-2e7ef910cf91-kube-api-access-hz7kk\") pod \"redhat-operators-qtnkm\" (UID: \"4b806661-8644-4c35-845e-2e7ef910cf91\") " pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.517690 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:37 crc kubenswrapper[4622]: I1126 11:37:37.928617 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qtnkm"] Nov 26 11:37:38 crc kubenswrapper[4622]: I1126 11:37:38.893168 4622 generic.go:334] "Generic (PLEG): container finished" podID="4b806661-8644-4c35-845e-2e7ef910cf91" containerID="19bfea5d87b7097ff5d3d48f147624086e3aa575542c31be2fbdeed8cadc0509" exitCode=0 Nov 26 11:37:38 crc kubenswrapper[4622]: I1126 11:37:38.893233 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qtnkm" event={"ID":"4b806661-8644-4c35-845e-2e7ef910cf91","Type":"ContainerDied","Data":"19bfea5d87b7097ff5d3d48f147624086e3aa575542c31be2fbdeed8cadc0509"} Nov 26 11:37:38 crc kubenswrapper[4622]: I1126 11:37:38.893552 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qtnkm" event={"ID":"4b806661-8644-4c35-845e-2e7ef910cf91","Type":"ContainerStarted","Data":"ce7e03890011bd9701bb6c09ef6445a21187bf76eb29df398940b759d3a000b7"} Nov 26 11:37:39 crc kubenswrapper[4622]: I1126 11:37:39.908356 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qtnkm" event={"ID":"4b806661-8644-4c35-845e-2e7ef910cf91","Type":"ContainerStarted","Data":"b1500181293b92bda25555b2edff36bdaaeaaaf9cf125f1905d78255d1130ee4"} Nov 26 11:37:41 crc kubenswrapper[4622]: I1126 11:37:41.927366 4622 generic.go:334] "Generic (PLEG): container finished" podID="4b806661-8644-4c35-845e-2e7ef910cf91" containerID="b1500181293b92bda25555b2edff36bdaaeaaaf9cf125f1905d78255d1130ee4" exitCode=0 Nov 26 11:37:41 crc kubenswrapper[4622]: I1126 11:37:41.927424 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qtnkm" event={"ID":"4b806661-8644-4c35-845e-2e7ef910cf91","Type":"ContainerDied","Data":"b1500181293b92bda25555b2edff36bdaaeaaaf9cf125f1905d78255d1130ee4"} Nov 26 11:37:42 crc kubenswrapper[4622]: I1126 11:37:42.937204 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qtnkm" event={"ID":"4b806661-8644-4c35-845e-2e7ef910cf91","Type":"ContainerStarted","Data":"0dd77ff72b946f986aedb79a3a243e7608bed3c4888d99ad322530dde986b9d1"} Nov 26 11:37:42 crc kubenswrapper[4622]: I1126 11:37:42.950859 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qtnkm" podStartSLOduration=2.441521108 podStartE2EDuration="5.950844391s" podCreationTimestamp="2025-11-26 11:37:37 +0000 UTC" firstStartedPulling="2025-11-26 11:37:38.894673651 +0000 UTC m=+1618.485885173" lastFinishedPulling="2025-11-26 11:37:42.403996934 +0000 UTC m=+1621.995208456" observedRunningTime="2025-11-26 11:37:42.949886163 +0000 UTC m=+1622.541097685" watchObservedRunningTime="2025-11-26 
11:37:42.950844391 +0000 UTC m=+1622.542055912" Nov 26 11:37:43 crc kubenswrapper[4622]: I1126 11:37:43.790079 4622 scope.go:117] "RemoveContainer" containerID="59ddba8aff1d87dc060000a6ce208a22cd799cc69031d90bdba68314491abd62" Nov 26 11:37:43 crc kubenswrapper[4622]: I1126 11:37:43.841284 4622 scope.go:117] "RemoveContainer" containerID="f4c76e7d8077f210da0fa65bfad4dc1a3bf17465478bb43b49d6e41e30bcdab2" Nov 26 11:37:43 crc kubenswrapper[4622]: I1126 11:37:43.871354 4622 scope.go:117] "RemoveContainer" containerID="4b4b4dc95ecc57fcd244dcf874d755e7b0c55f3a0d6b2066b31c5b6b9ddfdef8" Nov 26 11:37:43 crc kubenswrapper[4622]: I1126 11:37:43.940297 4622 scope.go:117] "RemoveContainer" containerID="288da523a4df50e70f490a81f781f5ff9e80284482b2b2e02c8f5e38ecdc533b" Nov 26 11:37:43 crc kubenswrapper[4622]: I1126 11:37:43.973979 4622 scope.go:117] "RemoveContainer" containerID="69c9733686ce870a7e858d8928dab96e5c068832ac1342d109bb4d7b2d83c958" Nov 26 11:37:47 crc kubenswrapper[4622]: I1126 11:37:47.518674 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:47 crc kubenswrapper[4622]: I1126 11:37:47.519284 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:47 crc kubenswrapper[4622]: I1126 11:37:47.559595 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:48 crc kubenswrapper[4622]: I1126 11:37:48.029321 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:48 crc kubenswrapper[4622]: I1126 11:37:48.071863 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qtnkm"] Nov 26 11:37:48 crc kubenswrapper[4622]: I1126 11:37:48.706839 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:37:48 crc kubenswrapper[4622]: E1126 11:37:48.707886 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:37:50 crc kubenswrapper[4622]: I1126 11:37:50.018305 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qtnkm" podUID="4b806661-8644-4c35-845e-2e7ef910cf91" containerName="registry-server" containerID="cri-o://0dd77ff72b946f986aedb79a3a243e7608bed3c4888d99ad322530dde986b9d1" gracePeriod=2 Nov 26 11:37:50 crc kubenswrapper[4622]: I1126 11:37:50.403721 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:50 crc kubenswrapper[4622]: I1126 11:37:50.534436 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hz7kk\" (UniqueName: \"kubernetes.io/projected/4b806661-8644-4c35-845e-2e7ef910cf91-kube-api-access-hz7kk\") pod \"4b806661-8644-4c35-845e-2e7ef910cf91\" (UID: \"4b806661-8644-4c35-845e-2e7ef910cf91\") " Nov 26 11:37:50 crc kubenswrapper[4622]: I1126 11:37:50.534931 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b806661-8644-4c35-845e-2e7ef910cf91-catalog-content\") pod \"4b806661-8644-4c35-845e-2e7ef910cf91\" (UID: \"4b806661-8644-4c35-845e-2e7ef910cf91\") " Nov 26 11:37:50 crc kubenswrapper[4622]: I1126 11:37:50.535191 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b806661-8644-4c35-845e-2e7ef910cf91-utilities\") pod \"4b806661-8644-4c35-845e-2e7ef910cf91\" (UID: \"4b806661-8644-4c35-845e-2e7ef910cf91\") " Nov 26 11:37:50 crc kubenswrapper[4622]: I1126 11:37:50.536104 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b806661-8644-4c35-845e-2e7ef910cf91-utilities" (OuterVolumeSpecName: "utilities") pod "4b806661-8644-4c35-845e-2e7ef910cf91" (UID: "4b806661-8644-4c35-845e-2e7ef910cf91"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:37:50 crc kubenswrapper[4622]: I1126 11:37:50.537282 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b806661-8644-4c35-845e-2e7ef910cf91-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:37:50 crc kubenswrapper[4622]: I1126 11:37:50.541715 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b806661-8644-4c35-845e-2e7ef910cf91-kube-api-access-hz7kk" (OuterVolumeSpecName: "kube-api-access-hz7kk") pod "4b806661-8644-4c35-845e-2e7ef910cf91" (UID: "4b806661-8644-4c35-845e-2e7ef910cf91"). InnerVolumeSpecName "kube-api-access-hz7kk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:37:50 crc kubenswrapper[4622]: I1126 11:37:50.609053 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b806661-8644-4c35-845e-2e7ef910cf91-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4b806661-8644-4c35-845e-2e7ef910cf91" (UID: "4b806661-8644-4c35-845e-2e7ef910cf91"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:37:50 crc kubenswrapper[4622]: I1126 11:37:50.641548 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b806661-8644-4c35-845e-2e7ef910cf91-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:37:50 crc kubenswrapper[4622]: I1126 11:37:50.642028 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hz7kk\" (UniqueName: \"kubernetes.io/projected/4b806661-8644-4c35-845e-2e7ef910cf91-kube-api-access-hz7kk\") on node \"crc\" DevicePath \"\"" Nov 26 11:37:50 crc kubenswrapper[4622]: E1126 11:37:50.813241 4622 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b806661_8644_4c35_845e_2e7ef910cf91.slice\": RecentStats: unable to find data in memory cache]" Nov 26 11:37:51 crc kubenswrapper[4622]: I1126 11:37:51.030971 4622 generic.go:334] "Generic (PLEG): container finished" podID="4b806661-8644-4c35-845e-2e7ef910cf91" containerID="0dd77ff72b946f986aedb79a3a243e7608bed3c4888d99ad322530dde986b9d1" exitCode=0 Nov 26 11:37:51 crc kubenswrapper[4622]: I1126 11:37:51.031068 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qtnkm" Nov 26 11:37:51 crc kubenswrapper[4622]: I1126 11:37:51.031053 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qtnkm" event={"ID":"4b806661-8644-4c35-845e-2e7ef910cf91","Type":"ContainerDied","Data":"0dd77ff72b946f986aedb79a3a243e7608bed3c4888d99ad322530dde986b9d1"} Nov 26 11:37:51 crc kubenswrapper[4622]: I1126 11:37:51.031233 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qtnkm" event={"ID":"4b806661-8644-4c35-845e-2e7ef910cf91","Type":"ContainerDied","Data":"ce7e03890011bd9701bb6c09ef6445a21187bf76eb29df398940b759d3a000b7"} Nov 26 11:37:51 crc kubenswrapper[4622]: I1126 11:37:51.031258 4622 scope.go:117] "RemoveContainer" containerID="0dd77ff72b946f986aedb79a3a243e7608bed3c4888d99ad322530dde986b9d1" Nov 26 11:37:51 crc kubenswrapper[4622]: I1126 11:37:51.053833 4622 scope.go:117] "RemoveContainer" containerID="b1500181293b92bda25555b2edff36bdaaeaaaf9cf125f1905d78255d1130ee4" Nov 26 11:37:51 crc kubenswrapper[4622]: I1126 11:37:51.057581 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qtnkm"] Nov 26 11:37:51 crc kubenswrapper[4622]: I1126 11:37:51.065663 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qtnkm"] Nov 26 11:37:51 crc kubenswrapper[4622]: I1126 11:37:51.070970 4622 scope.go:117] "RemoveContainer" containerID="19bfea5d87b7097ff5d3d48f147624086e3aa575542c31be2fbdeed8cadc0509" Nov 26 11:37:51 crc kubenswrapper[4622]: I1126 11:37:51.101938 4622 scope.go:117] "RemoveContainer" containerID="0dd77ff72b946f986aedb79a3a243e7608bed3c4888d99ad322530dde986b9d1" Nov 26 11:37:51 crc kubenswrapper[4622]: E1126 11:37:51.102493 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dd77ff72b946f986aedb79a3a243e7608bed3c4888d99ad322530dde986b9d1\": container with ID starting with 0dd77ff72b946f986aedb79a3a243e7608bed3c4888d99ad322530dde986b9d1 not found: ID does not exist" containerID="0dd77ff72b946f986aedb79a3a243e7608bed3c4888d99ad322530dde986b9d1" 
Nov 26 11:37:51 crc kubenswrapper[4622]: I1126 11:37:51.102552 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dd77ff72b946f986aedb79a3a243e7608bed3c4888d99ad322530dde986b9d1"} err="failed to get container status \"0dd77ff72b946f986aedb79a3a243e7608bed3c4888d99ad322530dde986b9d1\": rpc error: code = NotFound desc = could not find container \"0dd77ff72b946f986aedb79a3a243e7608bed3c4888d99ad322530dde986b9d1\": container with ID starting with 0dd77ff72b946f986aedb79a3a243e7608bed3c4888d99ad322530dde986b9d1 not found: ID does not exist" Nov 26 11:37:51 crc kubenswrapper[4622]: I1126 11:37:51.102584 4622 scope.go:117] "RemoveContainer" containerID="b1500181293b92bda25555b2edff36bdaaeaaaf9cf125f1905d78255d1130ee4" Nov 26 11:37:51 crc kubenswrapper[4622]: E1126 11:37:51.102877 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1500181293b92bda25555b2edff36bdaaeaaaf9cf125f1905d78255d1130ee4\": container with ID starting with b1500181293b92bda25555b2edff36bdaaeaaaf9cf125f1905d78255d1130ee4 not found: ID does not exist" containerID="b1500181293b92bda25555b2edff36bdaaeaaaf9cf125f1905d78255d1130ee4" Nov 26 11:37:51 crc kubenswrapper[4622]: I1126 11:37:51.102914 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1500181293b92bda25555b2edff36bdaaeaaaf9cf125f1905d78255d1130ee4"} err="failed to get container status \"b1500181293b92bda25555b2edff36bdaaeaaaf9cf125f1905d78255d1130ee4\": rpc error: code = NotFound desc = could not find container \"b1500181293b92bda25555b2edff36bdaaeaaaf9cf125f1905d78255d1130ee4\": container with ID starting with b1500181293b92bda25555b2edff36bdaaeaaaf9cf125f1905d78255d1130ee4 not found: ID does not exist" Nov 26 11:37:51 crc kubenswrapper[4622]: I1126 11:37:51.102939 4622 scope.go:117] "RemoveContainer" containerID="19bfea5d87b7097ff5d3d48f147624086e3aa575542c31be2fbdeed8cadc0509" Nov 26 11:37:51 crc kubenswrapper[4622]: E1126 11:37:51.103189 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19bfea5d87b7097ff5d3d48f147624086e3aa575542c31be2fbdeed8cadc0509\": container with ID starting with 19bfea5d87b7097ff5d3d48f147624086e3aa575542c31be2fbdeed8cadc0509 not found: ID does not exist" containerID="19bfea5d87b7097ff5d3d48f147624086e3aa575542c31be2fbdeed8cadc0509" Nov 26 11:37:51 crc kubenswrapper[4622]: I1126 11:37:51.103229 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19bfea5d87b7097ff5d3d48f147624086e3aa575542c31be2fbdeed8cadc0509"} err="failed to get container status \"19bfea5d87b7097ff5d3d48f147624086e3aa575542c31be2fbdeed8cadc0509\": rpc error: code = NotFound desc = could not find container \"19bfea5d87b7097ff5d3d48f147624086e3aa575542c31be2fbdeed8cadc0509\": container with ID starting with 19bfea5d87b7097ff5d3d48f147624086e3aa575542c31be2fbdeed8cadc0509 not found: ID does not exist" Nov 26 11:37:52 crc kubenswrapper[4622]: I1126 11:37:52.720484 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b806661-8644-4c35-845e-2e7ef910cf91" path="/var/lib/kubelet/pods/4b806661-8644-4c35-845e-2e7ef910cf91/volumes" Nov 26 11:37:54 crc kubenswrapper[4622]: I1126 11:37:54.039013 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-74vvh"] Nov 26 11:37:54 crc kubenswrapper[4622]: I1126 11:37:54.046493 4622 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-0293-account-create-update-9dthn"] Nov 26 11:37:54 crc kubenswrapper[4622]: I1126 11:37:54.053997 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-4zjlx"] Nov 26 11:37:54 crc kubenswrapper[4622]: I1126 11:37:54.059158 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-2c73-account-create-update-294sk"] Nov 26 11:37:54 crc kubenswrapper[4622]: I1126 11:37:54.063939 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-74vvh"] Nov 26 11:37:54 crc kubenswrapper[4622]: I1126 11:37:54.071581 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-0293-account-create-update-9dthn"] Nov 26 11:37:54 crc kubenswrapper[4622]: I1126 11:37:54.079129 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-8jkdp"] Nov 26 11:37:54 crc kubenswrapper[4622]: I1126 11:37:54.085994 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-4zjlx"] Nov 26 11:37:54 crc kubenswrapper[4622]: I1126 11:37:54.094043 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-2c73-account-create-update-294sk"] Nov 26 11:37:54 crc kubenswrapper[4622]: I1126 11:37:54.101440 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-8jkdp"] Nov 26 11:37:54 crc kubenswrapper[4622]: I1126 11:37:54.715530 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fc21caf-ae0d-4608-9437-f5180cf104c5" path="/var/lib/kubelet/pods/1fc21caf-ae0d-4608-9437-f5180cf104c5/volumes" Nov 26 11:37:54 crc kubenswrapper[4622]: I1126 11:37:54.716487 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="456ff799-1844-44ae-ac17-fb35b70ca9a4" path="/var/lib/kubelet/pods/456ff799-1844-44ae-ac17-fb35b70ca9a4/volumes" Nov 26 11:37:54 crc kubenswrapper[4622]: I1126 11:37:54.717086 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a55622c-83da-4690-a510-1310eb081529" path="/var/lib/kubelet/pods/8a55622c-83da-4690-a510-1310eb081529/volumes" Nov 26 11:37:54 crc kubenswrapper[4622]: I1126 11:37:54.717682 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5b6f9c-6f22-49fb-8506-3f7b25e7752f" path="/var/lib/kubelet/pods/bc5b6f9c-6f22-49fb-8506-3f7b25e7752f/volumes" Nov 26 11:37:54 crc kubenswrapper[4622]: I1126 11:37:54.718756 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d285966c-61fa-4be9-a16d-7aa8d0edcfe5" path="/var/lib/kubelet/pods/d285966c-61fa-4be9-a16d-7aa8d0edcfe5/volumes" Nov 26 11:37:55 crc kubenswrapper[4622]: I1126 11:37:55.031104 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-8ef7-account-create-update-xcz5c"] Nov 26 11:37:55 crc kubenswrapper[4622]: I1126 11:37:55.038465 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-8ef7-account-create-update-xcz5c"] Nov 26 11:37:56 crc kubenswrapper[4622]: I1126 11:37:56.715620 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96413beb-fc9e-42bd-a4b1-0542d3d48944" path="/var/lib/kubelet/pods/96413beb-fc9e-42bd-a4b1-0542d3d48944/volumes" Nov 26 11:38:02 crc kubenswrapper[4622]: I1126 11:38:02.706849 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:38:02 crc kubenswrapper[4622]: E1126 11:38:02.707631 4622 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:38:14 crc kubenswrapper[4622]: I1126 11:38:14.033969 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-kk9n7"] Nov 26 11:38:14 crc kubenswrapper[4622]: I1126 11:38:14.041030 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-kk9n7"] Nov 26 11:38:14 crc kubenswrapper[4622]: I1126 11:38:14.716311 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8" path="/var/lib/kubelet/pods/e1e4b7f5-c6ef-46e6-baa1-e7b160e28ec8/volumes" Nov 26 11:38:16 crc kubenswrapper[4622]: I1126 11:38:16.707353 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:38:16 crc kubenswrapper[4622]: E1126 11:38:16.708142 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:38:28 crc kubenswrapper[4622]: I1126 11:38:28.705762 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:38:28 crc kubenswrapper[4622]: E1126 11:38:28.706688 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:38:33 crc kubenswrapper[4622]: I1126 11:38:33.027352 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nnbm5"] Nov 26 11:38:33 crc kubenswrapper[4622]: I1126 11:38:33.033526 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nnbm5"] Nov 26 11:38:34 crc kubenswrapper[4622]: I1126 11:38:34.716349 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="497319a3-f556-4fbc-8c50-66ddff723bda" path="/var/lib/kubelet/pods/497319a3-f556-4fbc-8c50-66ddff723bda/volumes" Nov 26 11:38:35 crc kubenswrapper[4622]: I1126 11:38:35.026402 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-w8f5q"] Nov 26 11:38:35 crc kubenswrapper[4622]: I1126 11:38:35.032400 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-w8f5q"] Nov 26 11:38:36 crc kubenswrapper[4622]: I1126 11:38:36.716082 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c98829f1-bc94-4867-9c57-2caadb2ae3ae" path="/var/lib/kubelet/pods/c98829f1-bc94-4867-9c57-2caadb2ae3ae/volumes" Nov 26 11:38:39 crc 
kubenswrapper[4622]: I1126 11:38:39.707023 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:38:39 crc kubenswrapper[4622]: E1126 11:38:39.707855 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:38:44 crc kubenswrapper[4622]: I1126 11:38:44.082026 4622 scope.go:117] "RemoveContainer" containerID="5dd883989a13cea719f72bafd4f4b35a117827968850a9bf94190c8b3d23b0b2" Nov 26 11:38:44 crc kubenswrapper[4622]: I1126 11:38:44.103983 4622 scope.go:117] "RemoveContainer" containerID="c0548d4e21e88578a05cd7fced3dbaa84506f065eca40e3c12995178172fe0e2" Nov 26 11:38:44 crc kubenswrapper[4622]: I1126 11:38:44.144928 4622 scope.go:117] "RemoveContainer" containerID="193dd062fac70c2c1ea0d077fdcea4a1c690d39158d575f3f22b0c979dd5a3a4" Nov 26 11:38:44 crc kubenswrapper[4622]: I1126 11:38:44.179603 4622 scope.go:117] "RemoveContainer" containerID="f9abdcd9f52fac81d8fed4870fae13a5b73725c3fb4ff0d07108b2da56827511" Nov 26 11:38:44 crc kubenswrapper[4622]: I1126 11:38:44.219489 4622 scope.go:117] "RemoveContainer" containerID="625a3515b6940cdf5cb533148aa8bc8f493bf72916d2d1077d8560ad3e012c31" Nov 26 11:38:44 crc kubenswrapper[4622]: I1126 11:38:44.280094 4622 scope.go:117] "RemoveContainer" containerID="6fba37a5e02e65b15833f2f028dd820de9a60d7acd88c747abf6a665b4164c02" Nov 26 11:38:44 crc kubenswrapper[4622]: I1126 11:38:44.316836 4622 scope.go:117] "RemoveContainer" containerID="b0d4492693c3367cfc43dd4def5bdffaea248e075c76642e18529a7c01b7d04c" Nov 26 11:38:44 crc kubenswrapper[4622]: I1126 11:38:44.352816 4622 scope.go:117] "RemoveContainer" containerID="bd610e581b6879364acebf98dabadd3a9b2ef269d025c654ba7b2eb9da4b79c7" Nov 26 11:38:44 crc kubenswrapper[4622]: I1126 11:38:44.373993 4622 scope.go:117] "RemoveContainer" containerID="19f1d26598fac07827e23ed03ca583478dcb031f0984397d3c905ffdb3ae98b9" Nov 26 11:38:52 crc kubenswrapper[4622]: I1126 11:38:52.706930 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:38:52 crc kubenswrapper[4622]: E1126 11:38:52.707890 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:39:05 crc kubenswrapper[4622]: I1126 11:39:05.707447 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:39:05 crc kubenswrapper[4622]: E1126 11:39:05.708582 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:39:19 crc kubenswrapper[4622]: I1126 11:39:19.032020 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-cnkv2"] Nov 26 11:39:19 crc kubenswrapper[4622]: I1126 11:39:19.038576 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-cnkv2"] Nov 26 11:39:20 crc kubenswrapper[4622]: I1126 11:39:20.710264 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:39:20 crc kubenswrapper[4622]: E1126 11:39:20.711092 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:39:20 crc kubenswrapper[4622]: I1126 11:39:20.714377 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c" path="/var/lib/kubelet/pods/9702ef2d-d47c-4e61-a8cb-c5e2b8ccd03c/volumes" Nov 26 11:39:33 crc kubenswrapper[4622]: I1126 11:39:33.705695 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:39:33 crc kubenswrapper[4622]: E1126 11:39:33.706244 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:39:44 crc kubenswrapper[4622]: I1126 11:39:44.517151 4622 scope.go:117] "RemoveContainer" containerID="c07fcec673f0d8eaafa07191c76371194dfe82e9ed385a67365d44bc8fe67576" Nov 26 11:39:47 crc kubenswrapper[4622]: I1126 11:39:47.706025 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:39:47 crc kubenswrapper[4622]: E1126 11:39:47.706581 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:39:58 crc kubenswrapper[4622]: I1126 11:39:58.707434 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:39:58 crc kubenswrapper[4622]: E1126 11:39:58.708469 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" 
podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:40:12 crc kubenswrapper[4622]: I1126 11:40:12.706719 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:40:12 crc kubenswrapper[4622]: E1126 11:40:12.707354 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:40:26 crc kubenswrapper[4622]: I1126 11:40:26.708218 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:40:27 crc kubenswrapper[4622]: I1126 11:40:27.329139 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"4f3c3e8c9752c1670dbfbb660eabeb660d71bf97873317cac5ce80997876ad7f"} Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.320304 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.341622 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.351633 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.357626 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-fkf6t"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.364031 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.370450 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.376172 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.381161 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.385900 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.390590 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-lww62"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.394792 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.399103 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ff7zb"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.403391 4622 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dz7hz"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.407607 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-q7r5p"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.412202 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j5vr8"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.416371 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jvrh5"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.420869 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56fl5"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.425190 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-f9n9g"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.429661 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-fkf6t"] Nov 26 11:40:53 crc kubenswrapper[4622]: I1126 11:40:53.434224 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-gr9ml"] Nov 26 11:40:54 crc kubenswrapper[4622]: I1126 11:40:54.715089 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01028f5e-ea5c-415f-9c3b-bdf59f457db9" path="/var/lib/kubelet/pods/01028f5e-ea5c-415f-9c3b-bdf59f457db9/volumes" Nov 26 11:40:54 crc kubenswrapper[4622]: I1126 11:40:54.716024 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07d76be6-49bc-4d2d-b062-726853c7c6e1" path="/var/lib/kubelet/pods/07d76be6-49bc-4d2d-b062-726853c7c6e1/volumes" Nov 26 11:40:54 crc kubenswrapper[4622]: I1126 11:40:54.716649 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c258498-1776-4080-90ac-a37243370dfe" path="/var/lib/kubelet/pods/2c258498-1776-4080-90ac-a37243370dfe/volumes" Nov 26 11:40:54 crc kubenswrapper[4622]: I1126 11:40:54.717212 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e7303f2-ffb9-44ef-ac57-0b84f87fdca4" path="/var/lib/kubelet/pods/2e7303f2-ffb9-44ef-ac57-0b84f87fdca4/volumes" Nov 26 11:40:54 crc kubenswrapper[4622]: I1126 11:40:54.718473 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c88ddbe-6a37-4a2c-b6ed-1aa193644050" path="/var/lib/kubelet/pods/3c88ddbe-6a37-4a2c-b6ed-1aa193644050/volumes" Nov 26 11:40:54 crc kubenswrapper[4622]: I1126 11:40:54.719043 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ec8f07f-cde3-45c1-a9b1-2d21751b3026" path="/var/lib/kubelet/pods/5ec8f07f-cde3-45c1-a9b1-2d21751b3026/volumes" Nov 26 11:40:54 crc kubenswrapper[4622]: I1126 11:40:54.719585 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70174cd5-313d-4acf-afd4-6da6f13161ad" path="/var/lib/kubelet/pods/70174cd5-313d-4acf-afd4-6da6f13161ad/volumes" Nov 26 11:40:54 crc kubenswrapper[4622]: I1126 11:40:54.720606 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="829ddce0-8433-4217-b8a4-945579a8421c" path="/var/lib/kubelet/pods/829ddce0-8433-4217-b8a4-945579a8421c/volumes" Nov 26 11:40:54 crc kubenswrapper[4622]: I1126 11:40:54.721111 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="999f62ec-225d-4c9b-a142-71faced65384" path="/var/lib/kubelet/pods/999f62ec-225d-4c9b-a142-71faced65384/volumes" Nov 26 11:40:54 crc kubenswrapper[4622]: I1126 11:40:54.721649 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9b52e72-9a14-4f7c-b704-2dd85b2f4568" path="/var/lib/kubelet/pods/f9b52e72-9a14-4f7c-b704-2dd85b2f4568/volumes" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.546876 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm"] Nov 26 11:40:58 crc kubenswrapper[4622]: E1126 11:40:58.547552 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b806661-8644-4c35-845e-2e7ef910cf91" containerName="extract-content" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.547565 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b806661-8644-4c35-845e-2e7ef910cf91" containerName="extract-content" Nov 26 11:40:58 crc kubenswrapper[4622]: E1126 11:40:58.547595 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b806661-8644-4c35-845e-2e7ef910cf91" containerName="extract-utilities" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.547600 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b806661-8644-4c35-845e-2e7ef910cf91" containerName="extract-utilities" Nov 26 11:40:58 crc kubenswrapper[4622]: E1126 11:40:58.547614 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b806661-8644-4c35-845e-2e7ef910cf91" containerName="registry-server" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.547620 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b806661-8644-4c35-845e-2e7ef910cf91" containerName="registry-server" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.547781 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b806661-8644-4c35-845e-2e7ef910cf91" containerName="registry-server" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.548314 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.550409 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.555342 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.555473 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.555570 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.555480 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.557666 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm"] Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.619055 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b47d\" (UniqueName: \"kubernetes.io/projected/970a7d0b-90b1-4d00-9304-6ac2c06fb675-kube-api-access-2b47d\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.619100 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.619256 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.619409 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.619482 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.721400 4622 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.721590 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.721658 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.721735 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b47d\" (UniqueName: \"kubernetes.io/projected/970a7d0b-90b1-4d00-9304-6ac2c06fb675-kube-api-access-2b47d\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.721774 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.729014 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.729099 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.729577 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.729796 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.736655 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b47d\" (UniqueName: \"kubernetes.io/projected/970a7d0b-90b1-4d00-9304-6ac2c06fb675-kube-api-access-2b47d\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:58 crc kubenswrapper[4622]: I1126 11:40:58.869165 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:40:59 crc kubenswrapper[4622]: I1126 11:40:59.309571 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm"] Nov 26 11:40:59 crc kubenswrapper[4622]: I1126 11:40:59.314732 4622 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 26 11:40:59 crc kubenswrapper[4622]: I1126 11:40:59.552581 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" event={"ID":"970a7d0b-90b1-4d00-9304-6ac2c06fb675","Type":"ContainerStarted","Data":"03e1195e38273e0b547924e6e78c6cd4f099cdac5b7b3f69ff58fd18b6b978ae"} Nov 26 11:41:00 crc kubenswrapper[4622]: I1126 11:41:00.564816 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" event={"ID":"970a7d0b-90b1-4d00-9304-6ac2c06fb675","Type":"ContainerStarted","Data":"f135e2debef4724abfc05bda92604a12fe98392961ea8f2fc3830842ddb3f45a"} Nov 26 11:41:00 crc kubenswrapper[4622]: I1126 11:41:00.584793 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" podStartSLOduration=2.002200473 podStartE2EDuration="2.584779126s" podCreationTimestamp="2025-11-26 11:40:58 +0000 UTC" firstStartedPulling="2025-11-26 11:40:59.314522189 +0000 UTC m=+1818.905733711" lastFinishedPulling="2025-11-26 11:40:59.897100842 +0000 UTC m=+1819.488312364" observedRunningTime="2025-11-26 11:41:00.580979587 +0000 UTC m=+1820.172191109" watchObservedRunningTime="2025-11-26 11:41:00.584779126 +0000 UTC m=+1820.175990647" Nov 26 11:41:09 crc kubenswrapper[4622]: I1126 11:41:09.631596 4622 generic.go:334] "Generic (PLEG): container finished" podID="970a7d0b-90b1-4d00-9304-6ac2c06fb675" containerID="f135e2debef4724abfc05bda92604a12fe98392961ea8f2fc3830842ddb3f45a" exitCode=0 Nov 26 11:41:09 crc kubenswrapper[4622]: I1126 11:41:09.631730 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" event={"ID":"970a7d0b-90b1-4d00-9304-6ac2c06fb675","Type":"ContainerDied","Data":"f135e2debef4724abfc05bda92604a12fe98392961ea8f2fc3830842ddb3f45a"} Nov 26 11:41:10 crc kubenswrapper[4622]: I1126 11:41:10.938519 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.015654 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-repo-setup-combined-ca-bundle\") pod \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.015693 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-ssh-key\") pod \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.015795 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-ceph\") pod \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.015867 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b47d\" (UniqueName: \"kubernetes.io/projected/970a7d0b-90b1-4d00-9304-6ac2c06fb675-kube-api-access-2b47d\") pod \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.015892 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-inventory\") pod \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\" (UID: \"970a7d0b-90b1-4d00-9304-6ac2c06fb675\") " Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.021018 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/970a7d0b-90b1-4d00-9304-6ac2c06fb675-kube-api-access-2b47d" (OuterVolumeSpecName: "kube-api-access-2b47d") pod "970a7d0b-90b1-4d00-9304-6ac2c06fb675" (UID: "970a7d0b-90b1-4d00-9304-6ac2c06fb675"). InnerVolumeSpecName "kube-api-access-2b47d". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.021748 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "970a7d0b-90b1-4d00-9304-6ac2c06fb675" (UID: "970a7d0b-90b1-4d00-9304-6ac2c06fb675"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.021761 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-ceph" (OuterVolumeSpecName: "ceph") pod "970a7d0b-90b1-4d00-9304-6ac2c06fb675" (UID: "970a7d0b-90b1-4d00-9304-6ac2c06fb675"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.038516 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-inventory" (OuterVolumeSpecName: "inventory") pod "970a7d0b-90b1-4d00-9304-6ac2c06fb675" (UID: "970a7d0b-90b1-4d00-9304-6ac2c06fb675"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.038649 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "970a7d0b-90b1-4d00-9304-6ac2c06fb675" (UID: "970a7d0b-90b1-4d00-9304-6ac2c06fb675"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.118773 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.118806 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b47d\" (UniqueName: \"kubernetes.io/projected/970a7d0b-90b1-4d00-9304-6ac2c06fb675-kube-api-access-2b47d\") on node \"crc\" DevicePath \"\"" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.118817 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.118830 4622 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.118841 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/970a7d0b-90b1-4d00-9304-6ac2c06fb675-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.647567 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" event={"ID":"970a7d0b-90b1-4d00-9304-6ac2c06fb675","Type":"ContainerDied","Data":"03e1195e38273e0b547924e6e78c6cd4f099cdac5b7b3f69ff58fd18b6b978ae"} Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.647809 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03e1195e38273e0b547924e6e78c6cd4f099cdac5b7b3f69ff58fd18b6b978ae" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.647636 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.710123 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff"] Nov 26 11:41:11 crc kubenswrapper[4622]: E1126 11:41:11.710566 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="970a7d0b-90b1-4d00-9304-6ac2c06fb675" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.710587 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="970a7d0b-90b1-4d00-9304-6ac2c06fb675" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.710796 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="970a7d0b-90b1-4d00-9304-6ac2c06fb675" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.711418 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.712647 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.713042 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.714213 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.714395 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.715067 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.716551 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff"] Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.831537 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crkqr\" (UniqueName: \"kubernetes.io/projected/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-kube-api-access-crkqr\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-757ff\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.831712 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-757ff\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.831794 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-757ff\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " 
pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.832279 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-757ff\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.832551 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-757ff\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.933980 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crkqr\" (UniqueName: \"kubernetes.io/projected/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-kube-api-access-crkqr\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-757ff\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.934046 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-757ff\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.934082 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-757ff\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.934101 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-757ff\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.934134 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-757ff\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.937845 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-757ff\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 
11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.937883 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-757ff\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.938241 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-757ff\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.938726 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-757ff\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:11 crc kubenswrapper[4622]: I1126 11:41:11.946247 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crkqr\" (UniqueName: \"kubernetes.io/projected/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-kube-api-access-crkqr\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-757ff\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:12 crc kubenswrapper[4622]: I1126 11:41:12.033302 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:41:12 crc kubenswrapper[4622]: I1126 11:41:12.483348 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff"] Nov 26 11:41:12 crc kubenswrapper[4622]: I1126 11:41:12.654928 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" event={"ID":"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd","Type":"ContainerStarted","Data":"0a8e3f37ac5cf18a6cdc6ffeb17323597fc30380e5b164fde74519c4d70bfa24"} Nov 26 11:41:13 crc kubenswrapper[4622]: I1126 11:41:13.668789 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" event={"ID":"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd","Type":"ContainerStarted","Data":"d26704510a3d31910fa310f5b3582420113eb98e94a1b787d3fa85b67ea1da09"} Nov 26 11:41:13 crc kubenswrapper[4622]: I1126 11:41:13.687371 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" podStartSLOduration=2.078990199 podStartE2EDuration="2.687356383s" podCreationTimestamp="2025-11-26 11:41:11 +0000 UTC" firstStartedPulling="2025-11-26 11:41:12.487424452 +0000 UTC m=+1832.078635973" lastFinishedPulling="2025-11-26 11:41:13.095790636 +0000 UTC m=+1832.687002157" observedRunningTime="2025-11-26 11:41:13.683326029 +0000 UTC m=+1833.274537552" watchObservedRunningTime="2025-11-26 11:41:13.687356383 +0000 UTC m=+1833.278567905" Nov 26 11:41:44 crc kubenswrapper[4622]: I1126 11:41:44.595761 4622 scope.go:117] "RemoveContainer" containerID="4ad1d8661bdeb2025409de8b92c260de6db9221dff90506e0044c5aae6500a6a" Nov 26 11:41:44 crc kubenswrapper[4622]: I1126 11:41:44.622868 4622 scope.go:117] "RemoveContainer" containerID="40baf503f7d59c9feba672a8650dfb4a9378655f0feb29ec760edb2ab4e69a51" Nov 26 11:41:44 crc kubenswrapper[4622]: I1126 11:41:44.667314 4622 scope.go:117] "RemoveContainer" containerID="65a525ae10851f73fcb96b845f1d2c380cbbd3e4698790249966932536c5b9b0" Nov 26 11:41:44 crc kubenswrapper[4622]: I1126 11:41:44.721231 4622 scope.go:117] "RemoveContainer" containerID="071aaf0f86fbc29ef8b509e2f6e7386a0be606fa6768f3925f10a41f3ac8223f" Nov 26 11:41:44 crc kubenswrapper[4622]: I1126 11:41:44.764741 4622 scope.go:117] "RemoveContainer" containerID="7c216adaf12f7b584e3ca83221992512c654bc3ee77a2b4b46db329f7ef14cbd" Nov 26 11:42:34 crc kubenswrapper[4622]: I1126 11:42:34.325761 4622 generic.go:334] "Generic (PLEG): container finished" podID="f8ee3dad-287f-4ba8-9b48-0eb36bdabafd" containerID="d26704510a3d31910fa310f5b3582420113eb98e94a1b787d3fa85b67ea1da09" exitCode=0 Nov 26 11:42:34 crc kubenswrapper[4622]: I1126 11:42:34.325844 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" event={"ID":"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd","Type":"ContainerDied","Data":"d26704510a3d31910fa310f5b3582420113eb98e94a1b787d3fa85b67ea1da09"} Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.648066 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.740244 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-ssh-key\") pod \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.740302 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-bootstrap-combined-ca-bundle\") pod \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.740381 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-ceph\") pod \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.740433 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crkqr\" (UniqueName: \"kubernetes.io/projected/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-kube-api-access-crkqr\") pod \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.740468 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-inventory\") pod \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\" (UID: \"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd\") " Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.746808 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-ceph" (OuterVolumeSpecName: "ceph") pod "f8ee3dad-287f-4ba8-9b48-0eb36bdabafd" (UID: "f8ee3dad-287f-4ba8-9b48-0eb36bdabafd"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.746877 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-kube-api-access-crkqr" (OuterVolumeSpecName: "kube-api-access-crkqr") pod "f8ee3dad-287f-4ba8-9b48-0eb36bdabafd" (UID: "f8ee3dad-287f-4ba8-9b48-0eb36bdabafd"). InnerVolumeSpecName "kube-api-access-crkqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.748237 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "f8ee3dad-287f-4ba8-9b48-0eb36bdabafd" (UID: "f8ee3dad-287f-4ba8-9b48-0eb36bdabafd"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.764094 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-inventory" (OuterVolumeSpecName: "inventory") pod "f8ee3dad-287f-4ba8-9b48-0eb36bdabafd" (UID: "f8ee3dad-287f-4ba8-9b48-0eb36bdabafd"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.785638 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f8ee3dad-287f-4ba8-9b48-0eb36bdabafd" (UID: "f8ee3dad-287f-4ba8-9b48-0eb36bdabafd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.843598 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.843631 4622 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.843647 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.843658 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crkqr\" (UniqueName: \"kubernetes.io/projected/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-kube-api-access-crkqr\") on node \"crc\" DevicePath \"\"" Nov 26 11:42:35 crc kubenswrapper[4622]: I1126 11:42:35.843666 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8ee3dad-287f-4ba8-9b48-0eb36bdabafd-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.344678 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" event={"ID":"f8ee3dad-287f-4ba8-9b48-0eb36bdabafd","Type":"ContainerDied","Data":"0a8e3f37ac5cf18a6cdc6ffeb17323597fc30380e5b164fde74519c4d70bfa24"} Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.344723 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a8e3f37ac5cf18a6cdc6ffeb17323597fc30380e5b164fde74519c4d70bfa24" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.344729 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-757ff" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.412225 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv"] Nov 26 11:42:36 crc kubenswrapper[4622]: E1126 11:42:36.414489 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ee3dad-287f-4ba8-9b48-0eb36bdabafd" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.414533 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ee3dad-287f-4ba8-9b48-0eb36bdabafd" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.414744 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ee3dad-287f-4ba8-9b48-0eb36bdabafd" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.415465 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.417989 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.418149 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.418358 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.418435 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.421404 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.425445 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv"] Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.457451 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.457552 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.457648 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " 
pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.457858 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gf45m\" (UniqueName: \"kubernetes.io/projected/ddb85683-5e25-4c50-8df2-7f7fee07588d-kube-api-access-gf45m\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:36 crc kubenswrapper[4622]: E1126 11:42:36.540994 4622 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8ee3dad_287f_4ba8_9b48_0eb36bdabafd.slice/crio-0a8e3f37ac5cf18a6cdc6ffeb17323597fc30380e5b164fde74519c4d70bfa24\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8ee3dad_287f_4ba8_9b48_0eb36bdabafd.slice\": RecentStats: unable to find data in memory cache]" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.560151 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gf45m\" (UniqueName: \"kubernetes.io/projected/ddb85683-5e25-4c50-8df2-7f7fee07588d-kube-api-access-gf45m\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.560334 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.560393 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.560439 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.565566 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.565979 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-ceph\") pod 
\"configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.566269 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.574798 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gf45m\" (UniqueName: \"kubernetes.io/projected/ddb85683-5e25-4c50-8df2-7f7fee07588d-kube-api-access-gf45m\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:36 crc kubenswrapper[4622]: I1126 11:42:36.733387 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:37 crc kubenswrapper[4622]: I1126 11:42:37.238309 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv"] Nov 26 11:42:37 crc kubenswrapper[4622]: I1126 11:42:37.353074 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" event={"ID":"ddb85683-5e25-4c50-8df2-7f7fee07588d","Type":"ContainerStarted","Data":"c2ca7c7bf6b478f68a23da53e5ee50c6150af202e8e0f4ab5862966e140cf0e3"} Nov 26 11:42:38 crc kubenswrapper[4622]: I1126 11:42:38.361729 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" event={"ID":"ddb85683-5e25-4c50-8df2-7f7fee07588d","Type":"ContainerStarted","Data":"e94f008dd006137a6bfc11c85b200508ff07955cd93d374ab84f539583646c88"} Nov 26 11:42:38 crc kubenswrapper[4622]: I1126 11:42:38.379950 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" podStartSLOduration=1.700487597 podStartE2EDuration="2.379930695s" podCreationTimestamp="2025-11-26 11:42:36 +0000 UTC" firstStartedPulling="2025-11-26 11:42:37.243148904 +0000 UTC m=+1916.834360425" lastFinishedPulling="2025-11-26 11:42:37.922592 +0000 UTC m=+1917.513803523" observedRunningTime="2025-11-26 11:42:38.374654831 +0000 UTC m=+1917.965866354" watchObservedRunningTime="2025-11-26 11:42:38.379930695 +0000 UTC m=+1917.971142237" Nov 26 11:42:44 crc kubenswrapper[4622]: I1126 11:42:44.893394 4622 scope.go:117] "RemoveContainer" containerID="736650ede3413ef0282995ac0b1dbaaa3dc8bebdf58e462ea7baa9b4214451ee" Nov 26 11:42:44 crc kubenswrapper[4622]: I1126 11:42:44.939311 4622 scope.go:117] "RemoveContainer" containerID="4d7a8510101bc50dbbe80a4bf50c03e122e04cbd6c5cf18e5eb73f0fb1bbd94b" Nov 26 11:42:45 crc kubenswrapper[4622]: I1126 11:42:45.199232 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 
11:42:45 crc kubenswrapper[4622]: I1126 11:42:45.199519 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:42:56 crc kubenswrapper[4622]: I1126 11:42:56.496226 4622 generic.go:334] "Generic (PLEG): container finished" podID="ddb85683-5e25-4c50-8df2-7f7fee07588d" containerID="e94f008dd006137a6bfc11c85b200508ff07955cd93d374ab84f539583646c88" exitCode=0 Nov 26 11:42:56 crc kubenswrapper[4622]: I1126 11:42:56.496310 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" event={"ID":"ddb85683-5e25-4c50-8df2-7f7fee07588d","Type":"ContainerDied","Data":"e94f008dd006137a6bfc11c85b200508ff07955cd93d374ab84f539583646c88"} Nov 26 11:42:57 crc kubenswrapper[4622]: I1126 11:42:57.821058 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:57 crc kubenswrapper[4622]: I1126 11:42:57.943551 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-inventory\") pod \"ddb85683-5e25-4c50-8df2-7f7fee07588d\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " Nov 26 11:42:57 crc kubenswrapper[4622]: I1126 11:42:57.943583 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-ssh-key\") pod \"ddb85683-5e25-4c50-8df2-7f7fee07588d\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " Nov 26 11:42:57 crc kubenswrapper[4622]: I1126 11:42:57.943638 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf45m\" (UniqueName: \"kubernetes.io/projected/ddb85683-5e25-4c50-8df2-7f7fee07588d-kube-api-access-gf45m\") pod \"ddb85683-5e25-4c50-8df2-7f7fee07588d\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " Nov 26 11:42:57 crc kubenswrapper[4622]: I1126 11:42:57.943720 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-ceph\") pod \"ddb85683-5e25-4c50-8df2-7f7fee07588d\" (UID: \"ddb85683-5e25-4c50-8df2-7f7fee07588d\") " Nov 26 11:42:57 crc kubenswrapper[4622]: I1126 11:42:57.949169 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddb85683-5e25-4c50-8df2-7f7fee07588d-kube-api-access-gf45m" (OuterVolumeSpecName: "kube-api-access-gf45m") pod "ddb85683-5e25-4c50-8df2-7f7fee07588d" (UID: "ddb85683-5e25-4c50-8df2-7f7fee07588d"). InnerVolumeSpecName "kube-api-access-gf45m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:42:57 crc kubenswrapper[4622]: I1126 11:42:57.949188 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-ceph" (OuterVolumeSpecName: "ceph") pod "ddb85683-5e25-4c50-8df2-7f7fee07588d" (UID: "ddb85683-5e25-4c50-8df2-7f7fee07588d"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:42:57 crc kubenswrapper[4622]: I1126 11:42:57.965038 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-inventory" (OuterVolumeSpecName: "inventory") pod "ddb85683-5e25-4c50-8df2-7f7fee07588d" (UID: "ddb85683-5e25-4c50-8df2-7f7fee07588d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:42:57 crc kubenswrapper[4622]: I1126 11:42:57.967264 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ddb85683-5e25-4c50-8df2-7f7fee07588d" (UID: "ddb85683-5e25-4c50-8df2-7f7fee07588d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.045206 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.045231 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.045244 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf45m\" (UniqueName: \"kubernetes.io/projected/ddb85683-5e25-4c50-8df2-7f7fee07588d-kube-api-access-gf45m\") on node \"crc\" DevicePath \"\"" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.045255 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ddb85683-5e25-4c50-8df2-7f7fee07588d-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.518782 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" event={"ID":"ddb85683-5e25-4c50-8df2-7f7fee07588d","Type":"ContainerDied","Data":"c2ca7c7bf6b478f68a23da53e5ee50c6150af202e8e0f4ab5862966e140cf0e3"} Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.519022 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2ca7c7bf6b478f68a23da53e5ee50c6150af202e8e0f4ab5862966e140cf0e3" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.518833 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.579188 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx"] Nov 26 11:42:58 crc kubenswrapper[4622]: E1126 11:42:58.579532 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddb85683-5e25-4c50-8df2-7f7fee07588d" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.579548 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddb85683-5e25-4c50-8df2-7f7fee07588d" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.579707 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddb85683-5e25-4c50-8df2-7f7fee07588d" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.580268 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.582683 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.582969 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.583119 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.583489 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.585805 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.598309 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx"] Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.661381 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mk4k\" (UniqueName: \"kubernetes.io/projected/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-kube-api-access-2mk4k\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx\" (UID: \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.661469 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx\" (UID: \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.661621 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx\" (UID: 
\"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.661700 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx\" (UID: \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.763155 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx\" (UID: \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.763272 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx\" (UID: \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.763331 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx\" (UID: \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.763370 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mk4k\" (UniqueName: \"kubernetes.io/projected/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-kube-api-access-2mk4k\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx\" (UID: \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.766619 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx\" (UID: \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.766996 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx\" (UID: \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.768117 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx\" (UID: \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " 
pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.776942 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mk4k\" (UniqueName: \"kubernetes.io/projected/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-kube-api-access-2mk4k\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx\" (UID: \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:42:58 crc kubenswrapper[4622]: I1126 11:42:58.897546 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:42:59 crc kubenswrapper[4622]: I1126 11:42:59.342702 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx"] Nov 26 11:42:59 crc kubenswrapper[4622]: W1126 11:42:59.346363 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0b0eff7_0c1f_4dff_a8b5_5f957caae0bb.slice/crio-7cd94bbb70b9902cc7b92a1a57688f944cd461ca68e6480e27cfae2da6bbe85e WatchSource:0}: Error finding container 7cd94bbb70b9902cc7b92a1a57688f944cd461ca68e6480e27cfae2da6bbe85e: Status 404 returned error can't find the container with id 7cd94bbb70b9902cc7b92a1a57688f944cd461ca68e6480e27cfae2da6bbe85e Nov 26 11:42:59 crc kubenswrapper[4622]: I1126 11:42:59.526427 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" event={"ID":"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb","Type":"ContainerStarted","Data":"7cd94bbb70b9902cc7b92a1a57688f944cd461ca68e6480e27cfae2da6bbe85e"} Nov 26 11:43:00 crc kubenswrapper[4622]: I1126 11:43:00.535303 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" event={"ID":"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb","Type":"ContainerStarted","Data":"1e0111e4af425b767224b90d702fe62336e234fa8cb577634d80419060373d6a"} Nov 26 11:43:00 crc kubenswrapper[4622]: I1126 11:43:00.548081 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" podStartSLOduration=1.5505513739999999 podStartE2EDuration="2.548065102s" podCreationTimestamp="2025-11-26 11:42:58 +0000 UTC" firstStartedPulling="2025-11-26 11:42:59.348453215 +0000 UTC m=+1938.939664738" lastFinishedPulling="2025-11-26 11:43:00.345966944 +0000 UTC m=+1939.937178466" observedRunningTime="2025-11-26 11:43:00.546845422 +0000 UTC m=+1940.138056944" watchObservedRunningTime="2025-11-26 11:43:00.548065102 +0000 UTC m=+1940.139276625" Nov 26 11:43:04 crc kubenswrapper[4622]: I1126 11:43:04.566078 4622 generic.go:334] "Generic (PLEG): container finished" podID="e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb" containerID="1e0111e4af425b767224b90d702fe62336e234fa8cb577634d80419060373d6a" exitCode=0 Nov 26 11:43:04 crc kubenswrapper[4622]: I1126 11:43:04.566167 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" event={"ID":"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb","Type":"ContainerDied","Data":"1e0111e4af425b767224b90d702fe62336e234fa8cb577634d80419060373d6a"} Nov 26 11:43:05 crc kubenswrapper[4622]: I1126 11:43:05.900400 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:43:05 crc kubenswrapper[4622]: I1126 11:43:05.979788 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-ssh-key\") pod \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\" (UID: \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " Nov 26 11:43:05 crc kubenswrapper[4622]: I1126 11:43:05.979923 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mk4k\" (UniqueName: \"kubernetes.io/projected/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-kube-api-access-2mk4k\") pod \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\" (UID: \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " Nov 26 11:43:05 crc kubenswrapper[4622]: I1126 11:43:05.979946 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-inventory\") pod \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\" (UID: \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " Nov 26 11:43:05 crc kubenswrapper[4622]: I1126 11:43:05.980064 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-ceph\") pod \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\" (UID: \"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb\") " Nov 26 11:43:05 crc kubenswrapper[4622]: I1126 11:43:05.985152 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-kube-api-access-2mk4k" (OuterVolumeSpecName: "kube-api-access-2mk4k") pod "e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb" (UID: "e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb"). InnerVolumeSpecName "kube-api-access-2mk4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:43:05 crc kubenswrapper[4622]: I1126 11:43:05.985418 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-ceph" (OuterVolumeSpecName: "ceph") pod "e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb" (UID: "e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.001474 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-inventory" (OuterVolumeSpecName: "inventory") pod "e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb" (UID: "e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.002005 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb" (UID: "e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.083957 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mk4k\" (UniqueName: \"kubernetes.io/projected/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-kube-api-access-2mk4k\") on node \"crc\" DevicePath \"\"" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.083993 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.084004 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.084013 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.582904 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" event={"ID":"e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb","Type":"ContainerDied","Data":"7cd94bbb70b9902cc7b92a1a57688f944cd461ca68e6480e27cfae2da6bbe85e"} Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.582945 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7cd94bbb70b9902cc7b92a1a57688f944cd461ca68e6480e27cfae2da6bbe85e" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.582956 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.640343 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46"] Nov 26 11:43:06 crc kubenswrapper[4622]: E1126 11:43:06.640795 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.640819 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.641025 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.641712 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.648903 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.648997 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.649101 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.649799 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.650287 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46"] Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.652220 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.696354 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g4z46\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.696529 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g4z46\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.696678 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g4z46\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.696842 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjqz2\" (UniqueName: \"kubernetes.io/projected/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-kube-api-access-xjqz2\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g4z46\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.800218 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g4z46\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.800361 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g4z46\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.800488 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g4z46\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.800869 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjqz2\" (UniqueName: \"kubernetes.io/projected/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-kube-api-access-xjqz2\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g4z46\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.804116 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g4z46\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.804337 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g4z46\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.805466 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g4z46\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.816528 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjqz2\" (UniqueName: \"kubernetes.io/projected/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-kube-api-access-xjqz2\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g4z46\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:06 crc kubenswrapper[4622]: I1126 11:43:06.958963 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:07 crc kubenswrapper[4622]: I1126 11:43:07.415897 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46"] Nov 26 11:43:07 crc kubenswrapper[4622]: W1126 11:43:07.420610 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8fb2686_cd66_4e6e_ac7f_466cf5211f3e.slice/crio-eda476fd0d31eb6e10359d7ea1354c85db7b2662a5fabf3dba9f922d48c9a7a8 WatchSource:0}: Error finding container eda476fd0d31eb6e10359d7ea1354c85db7b2662a5fabf3dba9f922d48c9a7a8: Status 404 returned error can't find the container with id eda476fd0d31eb6e10359d7ea1354c85db7b2662a5fabf3dba9f922d48c9a7a8 Nov 26 11:43:07 crc kubenswrapper[4622]: I1126 11:43:07.589966 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" event={"ID":"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e","Type":"ContainerStarted","Data":"eda476fd0d31eb6e10359d7ea1354c85db7b2662a5fabf3dba9f922d48c9a7a8"} Nov 26 11:43:08 crc kubenswrapper[4622]: I1126 11:43:08.598374 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" event={"ID":"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e","Type":"ContainerStarted","Data":"2d90adb9374c09385d9d8733c6122bf25b2202189830d86e9c01fc080bd13b30"} Nov 26 11:43:08 crc kubenswrapper[4622]: I1126 11:43:08.613331 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" podStartSLOduration=2.020334224 podStartE2EDuration="2.613316257s" podCreationTimestamp="2025-11-26 11:43:06 +0000 UTC" firstStartedPulling="2025-11-26 11:43:07.422203782 +0000 UTC m=+1947.013415305" lastFinishedPulling="2025-11-26 11:43:08.015185815 +0000 UTC m=+1947.606397338" observedRunningTime="2025-11-26 11:43:08.609111267 +0000 UTC m=+1948.200322789" watchObservedRunningTime="2025-11-26 11:43:08.613316257 +0000 UTC m=+1948.204527779" Nov 26 11:43:15 crc kubenswrapper[4622]: I1126 11:43:15.198730 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:43:15 crc kubenswrapper[4622]: I1126 11:43:15.199280 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:43:33 crc kubenswrapper[4622]: I1126 11:43:33.775437 4622 generic.go:334] "Generic (PLEG): container finished" podID="e8fb2686-cd66-4e6e-ac7f-466cf5211f3e" containerID="2d90adb9374c09385d9d8733c6122bf25b2202189830d86e9c01fc080bd13b30" exitCode=0 Nov 26 11:43:33 crc kubenswrapper[4622]: I1126 11:43:33.775520 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" event={"ID":"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e","Type":"ContainerDied","Data":"2d90adb9374c09385d9d8733c6122bf25b2202189830d86e9c01fc080bd13b30"} Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.111976 
4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.244388 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-ssh-key\") pod \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.244701 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-inventory\") pod \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.244801 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjqz2\" (UniqueName: \"kubernetes.io/projected/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-kube-api-access-xjqz2\") pod \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.244927 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-ceph\") pod \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\" (UID: \"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e\") " Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.250820 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-ceph" (OuterVolumeSpecName: "ceph") pod "e8fb2686-cd66-4e6e-ac7f-466cf5211f3e" (UID: "e8fb2686-cd66-4e6e-ac7f-466cf5211f3e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.251259 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-kube-api-access-xjqz2" (OuterVolumeSpecName: "kube-api-access-xjqz2") pod "e8fb2686-cd66-4e6e-ac7f-466cf5211f3e" (UID: "e8fb2686-cd66-4e6e-ac7f-466cf5211f3e"). InnerVolumeSpecName "kube-api-access-xjqz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.267754 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e8fb2686-cd66-4e6e-ac7f-466cf5211f3e" (UID: "e8fb2686-cd66-4e6e-ac7f-466cf5211f3e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.269184 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-inventory" (OuterVolumeSpecName: "inventory") pod "e8fb2686-cd66-4e6e-ac7f-466cf5211f3e" (UID: "e8fb2686-cd66-4e6e-ac7f-466cf5211f3e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.348066 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.348104 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.348116 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.348127 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjqz2\" (UniqueName: \"kubernetes.io/projected/e8fb2686-cd66-4e6e-ac7f-466cf5211f3e-kube-api-access-xjqz2\") on node \"crc\" DevicePath \"\"" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.790584 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" event={"ID":"e8fb2686-cd66-4e6e-ac7f-466cf5211f3e","Type":"ContainerDied","Data":"eda476fd0d31eb6e10359d7ea1354c85db7b2662a5fabf3dba9f922d48c9a7a8"} Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.790897 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eda476fd0d31eb6e10359d7ea1354c85db7b2662a5fabf3dba9f922d48c9a7a8" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.790643 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g4z46" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.852069 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts"] Nov 26 11:43:35 crc kubenswrapper[4622]: E1126 11:43:35.852492 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8fb2686-cd66-4e6e-ac7f-466cf5211f3e" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.852649 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8fb2686-cd66-4e6e-ac7f-466cf5211f3e" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.852887 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8fb2686-cd66-4e6e-ac7f-466cf5211f3e" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.853589 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.855786 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.855956 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.855981 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.856066 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.856481 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.860699 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts"] Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.958815 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.958878 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krt2x\" (UniqueName: \"kubernetes.io/projected/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-kube-api-access-krt2x\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.958968 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:35 crc kubenswrapper[4622]: I1126 11:43:35.959123 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:36 crc kubenswrapper[4622]: I1126 11:43:36.061140 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:36 crc kubenswrapper[4622]: I1126 11:43:36.061279 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:36 crc kubenswrapper[4622]: I1126 11:43:36.061312 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krt2x\" (UniqueName: \"kubernetes.io/projected/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-kube-api-access-krt2x\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:36 crc kubenswrapper[4622]: I1126 11:43:36.061350 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:36 crc kubenswrapper[4622]: I1126 11:43:36.065815 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:36 crc kubenswrapper[4622]: I1126 11:43:36.065834 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:36 crc kubenswrapper[4622]: I1126 11:43:36.066821 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:36 crc kubenswrapper[4622]: I1126 11:43:36.077151 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krt2x\" (UniqueName: \"kubernetes.io/projected/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-kube-api-access-krt2x\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:36 crc kubenswrapper[4622]: I1126 11:43:36.171157 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:36 crc kubenswrapper[4622]: I1126 11:43:36.629574 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts"] Nov 26 11:43:36 crc kubenswrapper[4622]: I1126 11:43:36.798799 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" event={"ID":"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c","Type":"ContainerStarted","Data":"739467683e22a8e20b166ca17721b95acd5177db41b43a57e6b0d3ed35d031c0"} Nov 26 11:43:37 crc kubenswrapper[4622]: I1126 11:43:37.808802 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" event={"ID":"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c","Type":"ContainerStarted","Data":"754147fc5239461d9e37f35d0000020fa9231ee1eab7ddc756148c3f3205641b"} Nov 26 11:43:37 crc kubenswrapper[4622]: I1126 11:43:37.832552 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" podStartSLOduration=2.262794595 podStartE2EDuration="2.832527455s" podCreationTimestamp="2025-11-26 11:43:35 +0000 UTC" firstStartedPulling="2025-11-26 11:43:36.627279093 +0000 UTC m=+1976.218490615" lastFinishedPulling="2025-11-26 11:43:37.197011943 +0000 UTC m=+1976.788223475" observedRunningTime="2025-11-26 11:43:37.824388005 +0000 UTC m=+1977.415599527" watchObservedRunningTime="2025-11-26 11:43:37.832527455 +0000 UTC m=+1977.423738976" Nov 26 11:43:40 crc kubenswrapper[4622]: I1126 11:43:40.837584 4622 generic.go:334] "Generic (PLEG): container finished" podID="5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c" containerID="754147fc5239461d9e37f35d0000020fa9231ee1eab7ddc756148c3f3205641b" exitCode=0 Nov 26 11:43:40 crc kubenswrapper[4622]: I1126 11:43:40.837642 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" event={"ID":"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c","Type":"ContainerDied","Data":"754147fc5239461d9e37f35d0000020fa9231ee1eab7ddc756148c3f3205641b"} Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.197809 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.290812 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-ssh-key\") pod \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.290928 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-ceph\") pod \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.291101 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-inventory\") pod \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.291149 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krt2x\" (UniqueName: \"kubernetes.io/projected/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-kube-api-access-krt2x\") pod \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\" (UID: \"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c\") " Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.297279 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-ceph" (OuterVolumeSpecName: "ceph") pod "5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c" (UID: "5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.297679 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-kube-api-access-krt2x" (OuterVolumeSpecName: "kube-api-access-krt2x") pod "5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c" (UID: "5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c"). InnerVolumeSpecName "kube-api-access-krt2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.315007 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c" (UID: "5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.315110 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-inventory" (OuterVolumeSpecName: "inventory") pod "5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c" (UID: "5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.394364 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.394397 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krt2x\" (UniqueName: \"kubernetes.io/projected/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-kube-api-access-krt2x\") on node \"crc\" DevicePath \"\"" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.394412 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.394421 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.854238 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" event={"ID":"5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c","Type":"ContainerDied","Data":"739467683e22a8e20b166ca17721b95acd5177db41b43a57e6b0d3ed35d031c0"} Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.854277 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="739467683e22a8e20b166ca17721b95acd5177db41b43a57e6b0d3ed35d031c0" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.854271 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.916285 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd"] Nov 26 11:43:42 crc kubenswrapper[4622]: E1126 11:43:42.916734 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.916757 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.916969 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.917791 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.919396 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.919583 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.924195 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd"] Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.925076 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.926228 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:43:42 crc kubenswrapper[4622]: I1126 11:43:42.926467 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:43:43 crc kubenswrapper[4622]: I1126 11:43:43.006990 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m99bd\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:43:43 crc kubenswrapper[4622]: I1126 11:43:43.007027 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m99bd\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:43:43 crc kubenswrapper[4622]: I1126 11:43:43.007059 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m99bd\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:43:43 crc kubenswrapper[4622]: I1126 11:43:43.007391 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxgh6\" (UniqueName: \"kubernetes.io/projected/0f029b05-5927-4ba7-a37d-b2b44c00071d-kube-api-access-mxgh6\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m99bd\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:43:43 crc kubenswrapper[4622]: I1126 11:43:43.110096 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxgh6\" (UniqueName: \"kubernetes.io/projected/0f029b05-5927-4ba7-a37d-b2b44c00071d-kube-api-access-mxgh6\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m99bd\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:43:43 crc kubenswrapper[4622]: I1126 11:43:43.110282 4622 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m99bd\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:43:43 crc kubenswrapper[4622]: I1126 11:43:43.110313 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m99bd\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:43:43 crc kubenswrapper[4622]: I1126 11:43:43.110359 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m99bd\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:43:43 crc kubenswrapper[4622]: I1126 11:43:43.117089 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m99bd\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:43:43 crc kubenswrapper[4622]: I1126 11:43:43.117143 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m99bd\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:43:43 crc kubenswrapper[4622]: I1126 11:43:43.117672 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m99bd\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:43:43 crc kubenswrapper[4622]: I1126 11:43:43.124899 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxgh6\" (UniqueName: \"kubernetes.io/projected/0f029b05-5927-4ba7-a37d-b2b44c00071d-kube-api-access-mxgh6\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m99bd\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:43:43 crc kubenswrapper[4622]: I1126 11:43:43.243793 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:43:43 crc kubenswrapper[4622]: I1126 11:43:43.735883 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd"] Nov 26 11:43:43 crc kubenswrapper[4622]: I1126 11:43:43.866209 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" event={"ID":"0f029b05-5927-4ba7-a37d-b2b44c00071d","Type":"ContainerStarted","Data":"5a2c3d49a740240adfb31c4ea9d53a5a8c016ba0c34102867415d72de87b915c"} Nov 26 11:43:44 crc kubenswrapper[4622]: I1126 11:43:44.876851 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" event={"ID":"0f029b05-5927-4ba7-a37d-b2b44c00071d","Type":"ContainerStarted","Data":"61fcbffc7be6dc852a23326681915d98e3eb38d6d38676e18cf5768b6621dddb"} Nov 26 11:43:44 crc kubenswrapper[4622]: I1126 11:43:44.890117 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" podStartSLOduration=2.128654607 podStartE2EDuration="2.890099053s" podCreationTimestamp="2025-11-26 11:43:42 +0000 UTC" firstStartedPulling="2025-11-26 11:43:43.743018883 +0000 UTC m=+1983.334230405" lastFinishedPulling="2025-11-26 11:43:44.50446333 +0000 UTC m=+1984.095674851" observedRunningTime="2025-11-26 11:43:44.889458565 +0000 UTC m=+1984.480670087" watchObservedRunningTime="2025-11-26 11:43:44.890099053 +0000 UTC m=+1984.481310575" Nov 26 11:43:45 crc kubenswrapper[4622]: I1126 11:43:45.007894 4622 scope.go:117] "RemoveContainer" containerID="066d67ce7b7206804751cc96fca97453836ded6ee87038b977ae28c1329bac73" Nov 26 11:43:45 crc kubenswrapper[4622]: I1126 11:43:45.048599 4622 scope.go:117] "RemoveContainer" containerID="fdf1cc346dc50303917192d6fe434050dd4718bc0d9384983590223776bf9293" Nov 26 11:43:45 crc kubenswrapper[4622]: I1126 11:43:45.076003 4622 scope.go:117] "RemoveContainer" containerID="1d7fd813cdd074221cff9d9c6f4cf45662257476bb786721ebde10c148ca6c10" Nov 26 11:43:45 crc kubenswrapper[4622]: I1126 11:43:45.199494 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:43:45 crc kubenswrapper[4622]: I1126 11:43:45.199568 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:43:45 crc kubenswrapper[4622]: I1126 11:43:45.199626 4622 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:43:45 crc kubenswrapper[4622]: I1126 11:43:45.200391 4622 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4f3c3e8c9752c1670dbfbb660eabeb660d71bf97873317cac5ce80997876ad7f"} pod="openshift-machine-config-operator/machine-config-daemon-k565w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 26 
11:43:45 crc kubenswrapper[4622]: I1126 11:43:45.200448 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" containerID="cri-o://4f3c3e8c9752c1670dbfbb660eabeb660d71bf97873317cac5ce80997876ad7f" gracePeriod=600 Nov 26 11:43:45 crc kubenswrapper[4622]: I1126 11:43:45.886346 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerID="4f3c3e8c9752c1670dbfbb660eabeb660d71bf97873317cac5ce80997876ad7f" exitCode=0 Nov 26 11:43:45 crc kubenswrapper[4622]: I1126 11:43:45.886424 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerDied","Data":"4f3c3e8c9752c1670dbfbb660eabeb660d71bf97873317cac5ce80997876ad7f"} Nov 26 11:43:45 crc kubenswrapper[4622]: I1126 11:43:45.886680 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5"} Nov 26 11:43:45 crc kubenswrapper[4622]: I1126 11:43:45.886709 4622 scope.go:117] "RemoveContainer" containerID="ed53af3d9443f05ed0c0d41055ab4fa615565bc0c73ea9ae2be6b634e0eb757c" Nov 26 11:44:14 crc kubenswrapper[4622]: I1126 11:44:14.125930 4622 generic.go:334] "Generic (PLEG): container finished" podID="0f029b05-5927-4ba7-a37d-b2b44c00071d" containerID="61fcbffc7be6dc852a23326681915d98e3eb38d6d38676e18cf5768b6621dddb" exitCode=0 Nov 26 11:44:14 crc kubenswrapper[4622]: I1126 11:44:14.126033 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" event={"ID":"0f029b05-5927-4ba7-a37d-b2b44c00071d","Type":"ContainerDied","Data":"61fcbffc7be6dc852a23326681915d98e3eb38d6d38676e18cf5768b6621dddb"} Nov 26 11:44:15 crc kubenswrapper[4622]: I1126 11:44:15.459041 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:44:15 crc kubenswrapper[4622]: I1126 11:44:15.477829 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-ssh-key\") pod \"0f029b05-5927-4ba7-a37d-b2b44c00071d\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " Nov 26 11:44:15 crc kubenswrapper[4622]: I1126 11:44:15.477944 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-inventory\") pod \"0f029b05-5927-4ba7-a37d-b2b44c00071d\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " Nov 26 11:44:15 crc kubenswrapper[4622]: I1126 11:44:15.477987 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-ceph\") pod \"0f029b05-5927-4ba7-a37d-b2b44c00071d\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " Nov 26 11:44:15 crc kubenswrapper[4622]: I1126 11:44:15.478112 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxgh6\" (UniqueName: \"kubernetes.io/projected/0f029b05-5927-4ba7-a37d-b2b44c00071d-kube-api-access-mxgh6\") pod \"0f029b05-5927-4ba7-a37d-b2b44c00071d\" (UID: \"0f029b05-5927-4ba7-a37d-b2b44c00071d\") " Nov 26 11:44:15 crc kubenswrapper[4622]: I1126 11:44:15.486069 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f029b05-5927-4ba7-a37d-b2b44c00071d-kube-api-access-mxgh6" (OuterVolumeSpecName: "kube-api-access-mxgh6") pod "0f029b05-5927-4ba7-a37d-b2b44c00071d" (UID: "0f029b05-5927-4ba7-a37d-b2b44c00071d"). InnerVolumeSpecName "kube-api-access-mxgh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:44:15 crc kubenswrapper[4622]: I1126 11:44:15.486481 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-ceph" (OuterVolumeSpecName: "ceph") pod "0f029b05-5927-4ba7-a37d-b2b44c00071d" (UID: "0f029b05-5927-4ba7-a37d-b2b44c00071d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:44:15 crc kubenswrapper[4622]: I1126 11:44:15.502463 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0f029b05-5927-4ba7-a37d-b2b44c00071d" (UID: "0f029b05-5927-4ba7-a37d-b2b44c00071d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:44:15 crc kubenswrapper[4622]: I1126 11:44:15.507696 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-inventory" (OuterVolumeSpecName: "inventory") pod "0f029b05-5927-4ba7-a37d-b2b44c00071d" (UID: "0f029b05-5927-4ba7-a37d-b2b44c00071d"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:44:15 crc kubenswrapper[4622]: I1126 11:44:15.580470 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:15 crc kubenswrapper[4622]: I1126 11:44:15.580516 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:15 crc kubenswrapper[4622]: I1126 11:44:15.580528 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f029b05-5927-4ba7-a37d-b2b44c00071d-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:15 crc kubenswrapper[4622]: I1126 11:44:15.580537 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxgh6\" (UniqueName: \"kubernetes.io/projected/0f029b05-5927-4ba7-a37d-b2b44c00071d-kube-api-access-mxgh6\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.141074 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" event={"ID":"0f029b05-5927-4ba7-a37d-b2b44c00071d","Type":"ContainerDied","Data":"5a2c3d49a740240adfb31c4ea9d53a5a8c016ba0c34102867415d72de87b915c"} Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.141150 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a2c3d49a740240adfb31c4ea9d53a5a8c016ba0c34102867415d72de87b915c" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.141173 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m99bd" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.211951 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8gvkn"] Nov 26 11:44:16 crc kubenswrapper[4622]: E1126 11:44:16.212404 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f029b05-5927-4ba7-a37d-b2b44c00071d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.212431 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f029b05-5927-4ba7-a37d-b2b44c00071d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.212711 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f029b05-5927-4ba7-a37d-b2b44c00071d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.213408 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.217236 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.217356 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.217436 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.217630 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.223114 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8gvkn"] Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.225076 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.292973 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz6q4\" (UniqueName: \"kubernetes.io/projected/b7189366-2284-4019-b4c4-ecc32857a6c6-kube-api-access-zz6q4\") pod \"ssh-known-hosts-edpm-deployment-8gvkn\" (UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.293344 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-8gvkn\" (UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.293437 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-ceph\") pod \"ssh-known-hosts-edpm-deployment-8gvkn\" (UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.293536 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-8gvkn\" (UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.395028 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-8gvkn\" (UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.395066 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-ceph\") pod \"ssh-known-hosts-edpm-deployment-8gvkn\" 
(UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.395090 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-8gvkn\" (UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.395127 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz6q4\" (UniqueName: \"kubernetes.io/projected/b7189366-2284-4019-b4c4-ecc32857a6c6-kube-api-access-zz6q4\") pod \"ssh-known-hosts-edpm-deployment-8gvkn\" (UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.401183 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-8gvkn\" (UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.402105 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-ceph\") pod \"ssh-known-hosts-edpm-deployment-8gvkn\" (UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.402305 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-8gvkn\" (UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.410375 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz6q4\" (UniqueName: \"kubernetes.io/projected/b7189366-2284-4019-b4c4-ecc32857a6c6-kube-api-access-zz6q4\") pod \"ssh-known-hosts-edpm-deployment-8gvkn\" (UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:16 crc kubenswrapper[4622]: I1126 11:44:16.530919 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:17 crc kubenswrapper[4622]: I1126 11:44:17.003586 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8gvkn"] Nov 26 11:44:17 crc kubenswrapper[4622]: I1126 11:44:17.150404 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" event={"ID":"b7189366-2284-4019-b4c4-ecc32857a6c6","Type":"ContainerStarted","Data":"6b63f8a1b6132ff1a0e1b4722b243cba27f6f34904ac29b0d0dd428f5af3d9ca"} Nov 26 11:44:18 crc kubenswrapper[4622]: I1126 11:44:18.158722 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" event={"ID":"b7189366-2284-4019-b4c4-ecc32857a6c6","Type":"ContainerStarted","Data":"0bcdad02a67e81298754833b399ff95eeed1b5fe503bd0b913a01ebb94752d57"} Nov 26 11:44:18 crc kubenswrapper[4622]: I1126 11:44:18.177821 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" podStartSLOduration=1.301636177 podStartE2EDuration="2.177803043s" podCreationTimestamp="2025-11-26 11:44:16 +0000 UTC" firstStartedPulling="2025-11-26 11:44:17.005873384 +0000 UTC m=+2016.597084906" lastFinishedPulling="2025-11-26 11:44:17.882040251 +0000 UTC m=+2017.473251772" observedRunningTime="2025-11-26 11:44:18.170426142 +0000 UTC m=+2017.761637664" watchObservedRunningTime="2025-11-26 11:44:18.177803043 +0000 UTC m=+2017.769014565" Nov 26 11:44:25 crc kubenswrapper[4622]: I1126 11:44:25.207111 4622 generic.go:334] "Generic (PLEG): container finished" podID="b7189366-2284-4019-b4c4-ecc32857a6c6" containerID="0bcdad02a67e81298754833b399ff95eeed1b5fe503bd0b913a01ebb94752d57" exitCode=0 Nov 26 11:44:25 crc kubenswrapper[4622]: I1126 11:44:25.207203 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" event={"ID":"b7189366-2284-4019-b4c4-ecc32857a6c6","Type":"ContainerDied","Data":"0bcdad02a67e81298754833b399ff95eeed1b5fe503bd0b913a01ebb94752d57"} Nov 26 11:44:26 crc kubenswrapper[4622]: I1126 11:44:26.500241 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:26 crc kubenswrapper[4622]: I1126 11:44:26.667754 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-inventory-0\") pod \"b7189366-2284-4019-b4c4-ecc32857a6c6\" (UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " Nov 26 11:44:26 crc kubenswrapper[4622]: I1126 11:44:26.668022 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-ceph\") pod \"b7189366-2284-4019-b4c4-ecc32857a6c6\" (UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " Nov 26 11:44:26 crc kubenswrapper[4622]: I1126 11:44:26.668085 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-ssh-key-openstack-edpm-ipam\") pod \"b7189366-2284-4019-b4c4-ecc32857a6c6\" (UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " Nov 26 11:44:26 crc kubenswrapper[4622]: I1126 11:44:26.668110 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zz6q4\" (UniqueName: \"kubernetes.io/projected/b7189366-2284-4019-b4c4-ecc32857a6c6-kube-api-access-zz6q4\") pod \"b7189366-2284-4019-b4c4-ecc32857a6c6\" (UID: \"b7189366-2284-4019-b4c4-ecc32857a6c6\") " Nov 26 11:44:26 crc kubenswrapper[4622]: I1126 11:44:26.674655 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-ceph" (OuterVolumeSpecName: "ceph") pod "b7189366-2284-4019-b4c4-ecc32857a6c6" (UID: "b7189366-2284-4019-b4c4-ecc32857a6c6"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:44:26 crc kubenswrapper[4622]: I1126 11:44:26.674677 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7189366-2284-4019-b4c4-ecc32857a6c6-kube-api-access-zz6q4" (OuterVolumeSpecName: "kube-api-access-zz6q4") pod "b7189366-2284-4019-b4c4-ecc32857a6c6" (UID: "b7189366-2284-4019-b4c4-ecc32857a6c6"). InnerVolumeSpecName "kube-api-access-zz6q4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:44:26 crc kubenswrapper[4622]: I1126 11:44:26.688868 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "b7189366-2284-4019-b4c4-ecc32857a6c6" (UID: "b7189366-2284-4019-b4c4-ecc32857a6c6"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:44:26 crc kubenswrapper[4622]: I1126 11:44:26.689977 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "b7189366-2284-4019-b4c4-ecc32857a6c6" (UID: "b7189366-2284-4019-b4c4-ecc32857a6c6"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:44:26 crc kubenswrapper[4622]: I1126 11:44:26.770923 4622 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-inventory-0\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:26 crc kubenswrapper[4622]: I1126 11:44:26.770954 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:26 crc kubenswrapper[4622]: I1126 11:44:26.770965 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b7189366-2284-4019-b4c4-ecc32857a6c6-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:26 crc kubenswrapper[4622]: I1126 11:44:26.770978 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zz6q4\" (UniqueName: \"kubernetes.io/projected/b7189366-2284-4019-b4c4-ecc32857a6c6-kube-api-access-zz6q4\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.228736 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" event={"ID":"b7189366-2284-4019-b4c4-ecc32857a6c6","Type":"ContainerDied","Data":"6b63f8a1b6132ff1a0e1b4722b243cba27f6f34904ac29b0d0dd428f5af3d9ca"} Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.228781 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b63f8a1b6132ff1a0e1b4722b243cba27f6f34904ac29b0d0dd428f5af3d9ca" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.228782 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8gvkn" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.271949 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599"] Nov 26 11:44:27 crc kubenswrapper[4622]: E1126 11:44:27.272285 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7189366-2284-4019-b4c4-ecc32857a6c6" containerName="ssh-known-hosts-edpm-deployment" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.272307 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7189366-2284-4019-b4c4-ecc32857a6c6" containerName="ssh-known-hosts-edpm-deployment" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.272521 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7189366-2284-4019-b4c4-ecc32857a6c6" containerName="ssh-known-hosts-edpm-deployment" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.273115 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.276111 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.276225 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.276293 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.277164 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.281872 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599"] Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.288253 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.381444 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8f599\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.381628 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6vbn\" (UniqueName: \"kubernetes.io/projected/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-kube-api-access-v6vbn\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8f599\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.381682 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8f599\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.381756 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8f599\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.483912 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6vbn\" (UniqueName: \"kubernetes.io/projected/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-kube-api-access-v6vbn\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8f599\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.483996 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8f599\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.484075 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8f599\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.484236 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8f599\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.487469 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8f599\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.487553 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8f599\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.488460 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8f599\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.498770 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6vbn\" (UniqueName: \"kubernetes.io/projected/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-kube-api-access-v6vbn\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8f599\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:27 crc kubenswrapper[4622]: I1126 11:44:27.590036 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:28 crc kubenswrapper[4622]: I1126 11:44:28.019105 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599"] Nov 26 11:44:28 crc kubenswrapper[4622]: I1126 11:44:28.237058 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" event={"ID":"e1cc67a8-3b68-4f53-8e83-8986eaf29d05","Type":"ContainerStarted","Data":"f6569041e57432b1f507d6212c1f4c8b99b8268d236cd959e7930da229fa9271"} Nov 26 11:44:29 crc kubenswrapper[4622]: I1126 11:44:29.245366 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" event={"ID":"e1cc67a8-3b68-4f53-8e83-8986eaf29d05","Type":"ContainerStarted","Data":"a04a94297b3c9cf5f73978b0a5e7dd108c39ce0535599db2b0658dbfaa0b4d20"} Nov 26 11:44:29 crc kubenswrapper[4622]: I1126 11:44:29.263323 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" podStartSLOduration=1.745158196 podStartE2EDuration="2.263307141s" podCreationTimestamp="2025-11-26 11:44:27 +0000 UTC" firstStartedPulling="2025-11-26 11:44:28.023620226 +0000 UTC m=+2027.614831747" lastFinishedPulling="2025-11-26 11:44:28.54176917 +0000 UTC m=+2028.132980692" observedRunningTime="2025-11-26 11:44:29.256722283 +0000 UTC m=+2028.847933806" watchObservedRunningTime="2025-11-26 11:44:29.263307141 +0000 UTC m=+2028.854518663" Nov 26 11:44:34 crc kubenswrapper[4622]: I1126 11:44:34.282253 4622 generic.go:334] "Generic (PLEG): container finished" podID="e1cc67a8-3b68-4f53-8e83-8986eaf29d05" containerID="a04a94297b3c9cf5f73978b0a5e7dd108c39ce0535599db2b0658dbfaa0b4d20" exitCode=0 Nov 26 11:44:34 crc kubenswrapper[4622]: I1126 11:44:34.282329 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" event={"ID":"e1cc67a8-3b68-4f53-8e83-8986eaf29d05","Type":"ContainerDied","Data":"a04a94297b3c9cf5f73978b0a5e7dd108c39ce0535599db2b0658dbfaa0b4d20"} Nov 26 11:44:35 crc kubenswrapper[4622]: I1126 11:44:35.630734 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:35 crc kubenswrapper[4622]: I1126 11:44:35.735956 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-ceph\") pod \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " Nov 26 11:44:35 crc kubenswrapper[4622]: I1126 11:44:35.736039 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6vbn\" (UniqueName: \"kubernetes.io/projected/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-kube-api-access-v6vbn\") pod \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " Nov 26 11:44:35 crc kubenswrapper[4622]: I1126 11:44:35.736080 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-ssh-key\") pod \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " Nov 26 11:44:35 crc kubenswrapper[4622]: I1126 11:44:35.736178 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-inventory\") pod \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\" (UID: \"e1cc67a8-3b68-4f53-8e83-8986eaf29d05\") " Nov 26 11:44:35 crc kubenswrapper[4622]: I1126 11:44:35.741475 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-kube-api-access-v6vbn" (OuterVolumeSpecName: "kube-api-access-v6vbn") pod "e1cc67a8-3b68-4f53-8e83-8986eaf29d05" (UID: "e1cc67a8-3b68-4f53-8e83-8986eaf29d05"). InnerVolumeSpecName "kube-api-access-v6vbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:44:35 crc kubenswrapper[4622]: I1126 11:44:35.742022 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-ceph" (OuterVolumeSpecName: "ceph") pod "e1cc67a8-3b68-4f53-8e83-8986eaf29d05" (UID: "e1cc67a8-3b68-4f53-8e83-8986eaf29d05"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:44:35 crc kubenswrapper[4622]: I1126 11:44:35.758835 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e1cc67a8-3b68-4f53-8e83-8986eaf29d05" (UID: "e1cc67a8-3b68-4f53-8e83-8986eaf29d05"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:44:35 crc kubenswrapper[4622]: I1126 11:44:35.759148 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-inventory" (OuterVolumeSpecName: "inventory") pod "e1cc67a8-3b68-4f53-8e83-8986eaf29d05" (UID: "e1cc67a8-3b68-4f53-8e83-8986eaf29d05"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:44:35 crc kubenswrapper[4622]: I1126 11:44:35.838166 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6vbn\" (UniqueName: \"kubernetes.io/projected/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-kube-api-access-v6vbn\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:35 crc kubenswrapper[4622]: I1126 11:44:35.838191 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:35 crc kubenswrapper[4622]: I1126 11:44:35.838203 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:35 crc kubenswrapper[4622]: I1126 11:44:35.838212 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e1cc67a8-3b68-4f53-8e83-8986eaf29d05-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.298204 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" event={"ID":"e1cc67a8-3b68-4f53-8e83-8986eaf29d05","Type":"ContainerDied","Data":"f6569041e57432b1f507d6212c1f4c8b99b8268d236cd959e7930da229fa9271"} Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.298249 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f6569041e57432b1f507d6212c1f4c8b99b8268d236cd959e7930da229fa9271" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.298260 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8f599" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.349837 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk"] Nov 26 11:44:36 crc kubenswrapper[4622]: E1126 11:44:36.350182 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1cc67a8-3b68-4f53-8e83-8986eaf29d05" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.350204 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1cc67a8-3b68-4f53-8e83-8986eaf29d05" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.350405 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1cc67a8-3b68-4f53-8e83-8986eaf29d05" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.351042 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.354677 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.354714 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.354803 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.355317 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.355918 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.359924 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk"] Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.449372 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.449449 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsrdr\" (UniqueName: \"kubernetes.io/projected/6cd87990-9f66-40c9-ac5c-68b8755c46cd-kube-api-access-xsrdr\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.449733 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.449846 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.551290 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.551616 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.551643 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.551685 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsrdr\" (UniqueName: \"kubernetes.io/projected/6cd87990-9f66-40c9-ac5c-68b8755c46cd-kube-api-access-xsrdr\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.556780 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.556830 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.565848 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.566299 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsrdr\" (UniqueName: \"kubernetes.io/projected/6cd87990-9f66-40c9-ac5c-68b8755c46cd-kube-api-access-xsrdr\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:36 crc kubenswrapper[4622]: I1126 11:44:36.666474 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:37 crc kubenswrapper[4622]: I1126 11:44:37.105156 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk"] Nov 26 11:44:37 crc kubenswrapper[4622]: I1126 11:44:37.306178 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" event={"ID":"6cd87990-9f66-40c9-ac5c-68b8755c46cd","Type":"ContainerStarted","Data":"aaac95010fa422f4a33d613ee2d32993d4a11b848cc85d1559f79ba8ca3dc529"} Nov 26 11:44:38 crc kubenswrapper[4622]: I1126 11:44:38.315554 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" event={"ID":"6cd87990-9f66-40c9-ac5c-68b8755c46cd","Type":"ContainerStarted","Data":"14a2c2d558f7633b2c85822da79e99fcdae76b3c19f87d83bc37aacf558bcd41"} Nov 26 11:44:38 crc kubenswrapper[4622]: I1126 11:44:38.328595 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" podStartSLOduration=1.823146224 podStartE2EDuration="2.328578457s" podCreationTimestamp="2025-11-26 11:44:36 +0000 UTC" firstStartedPulling="2025-11-26 11:44:37.108116135 +0000 UTC m=+2036.699327657" lastFinishedPulling="2025-11-26 11:44:37.613548368 +0000 UTC m=+2037.204759890" observedRunningTime="2025-11-26 11:44:38.327931006 +0000 UTC m=+2037.919142528" watchObservedRunningTime="2025-11-26 11:44:38.328578457 +0000 UTC m=+2037.919789978" Nov 26 11:44:45 crc kubenswrapper[4622]: I1126 11:44:45.372705 4622 generic.go:334] "Generic (PLEG): container finished" podID="6cd87990-9f66-40c9-ac5c-68b8755c46cd" containerID="14a2c2d558f7633b2c85822da79e99fcdae76b3c19f87d83bc37aacf558bcd41" exitCode=0 Nov 26 11:44:45 crc kubenswrapper[4622]: I1126 11:44:45.372769 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" event={"ID":"6cd87990-9f66-40c9-ac5c-68b8755c46cd","Type":"ContainerDied","Data":"14a2c2d558f7633b2c85822da79e99fcdae76b3c19f87d83bc37aacf558bcd41"} Nov 26 11:44:46 crc kubenswrapper[4622]: I1126 11:44:46.678053 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:46 crc kubenswrapper[4622]: I1126 11:44:46.842812 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-inventory\") pod \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " Nov 26 11:44:46 crc kubenswrapper[4622]: I1126 11:44:46.842876 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xsrdr\" (UniqueName: \"kubernetes.io/projected/6cd87990-9f66-40c9-ac5c-68b8755c46cd-kube-api-access-xsrdr\") pod \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " Nov 26 11:44:46 crc kubenswrapper[4622]: I1126 11:44:46.842921 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-ceph\") pod \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " Nov 26 11:44:46 crc kubenswrapper[4622]: I1126 11:44:46.843112 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-ssh-key\") pod \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\" (UID: \"6cd87990-9f66-40c9-ac5c-68b8755c46cd\") " Nov 26 11:44:46 crc kubenswrapper[4622]: I1126 11:44:46.849237 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cd87990-9f66-40c9-ac5c-68b8755c46cd-kube-api-access-xsrdr" (OuterVolumeSpecName: "kube-api-access-xsrdr") pod "6cd87990-9f66-40c9-ac5c-68b8755c46cd" (UID: "6cd87990-9f66-40c9-ac5c-68b8755c46cd"). InnerVolumeSpecName "kube-api-access-xsrdr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:44:46 crc kubenswrapper[4622]: I1126 11:44:46.850523 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-ceph" (OuterVolumeSpecName: "ceph") pod "6cd87990-9f66-40c9-ac5c-68b8755c46cd" (UID: "6cd87990-9f66-40c9-ac5c-68b8755c46cd"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:44:46 crc kubenswrapper[4622]: I1126 11:44:46.867740 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-inventory" (OuterVolumeSpecName: "inventory") pod "6cd87990-9f66-40c9-ac5c-68b8755c46cd" (UID: "6cd87990-9f66-40c9-ac5c-68b8755c46cd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:44:46 crc kubenswrapper[4622]: I1126 11:44:46.868072 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6cd87990-9f66-40c9-ac5c-68b8755c46cd" (UID: "6cd87990-9f66-40c9-ac5c-68b8755c46cd"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:44:46 crc kubenswrapper[4622]: I1126 11:44:46.946358 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:46 crc kubenswrapper[4622]: I1126 11:44:46.946394 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:46 crc kubenswrapper[4622]: I1126 11:44:46.946405 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xsrdr\" (UniqueName: \"kubernetes.io/projected/6cd87990-9f66-40c9-ac5c-68b8755c46cd-kube-api-access-xsrdr\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:46 crc kubenswrapper[4622]: I1126 11:44:46.946423 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6cd87990-9f66-40c9-ac5c-68b8755c46cd-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.392436 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" event={"ID":"6cd87990-9f66-40c9-ac5c-68b8755c46cd","Type":"ContainerDied","Data":"aaac95010fa422f4a33d613ee2d32993d4a11b848cc85d1559f79ba8ca3dc529"} Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.392491 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aaac95010fa422f4a33d613ee2d32993d4a11b848cc85d1559f79ba8ca3dc529" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.392574 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.473458 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k"] Nov 26 11:44:47 crc kubenswrapper[4622]: E1126 11:44:47.474677 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cd87990-9f66-40c9-ac5c-68b8755c46cd" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.474706 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cd87990-9f66-40c9-ac5c-68b8755c46cd" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.474968 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cd87990-9f66-40c9-ac5c-68b8755c46cd" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.475805 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.478543 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.478810 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.479103 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.479640 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.479734 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.479793 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.480097 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.480805 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.481316 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k"] Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.658825 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.659174 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.659213 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.659377 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" 
(UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.659455 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.659561 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.659598 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.659717 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.659756 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.659796 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.659892 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 
11:44:47.659955 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.660034 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxjgm\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-kube-api-access-hxjgm\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.761826 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.762176 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.762204 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.762235 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.762269 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.762286 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ssh-key\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.762313 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.762341 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.762360 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxjgm\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-kube-api-access-hxjgm\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.762405 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.762428 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.762447 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.762514 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.766220 4622 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.766413 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.767137 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.767459 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.767644 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.768467 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.768488 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.768770 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc 
kubenswrapper[4622]: I1126 11:44:47.769374 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.770060 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.770786 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.770878 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.777449 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxjgm\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-kube-api-access-hxjgm\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-ss25k\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:47 crc kubenswrapper[4622]: I1126 11:44:47.792914 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:44:48 crc kubenswrapper[4622]: I1126 11:44:48.252971 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k"] Nov 26 11:44:48 crc kubenswrapper[4622]: I1126 11:44:48.401181 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" event={"ID":"f5e33fbe-58e7-46b2-9202-b7c646631fda","Type":"ContainerStarted","Data":"324cf4877a0940506551add20725fdced5e2f0aacd24c18bafe2b9ba04cd9280"} Nov 26 11:44:49 crc kubenswrapper[4622]: I1126 11:44:49.411232 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" event={"ID":"f5e33fbe-58e7-46b2-9202-b7c646631fda","Type":"ContainerStarted","Data":"48fd19101c1b0d414ca2e74dd7d43e70b4ab45619bc42829b769b15ca0f0f1f8"} Nov 26 11:44:49 crc kubenswrapper[4622]: I1126 11:44:49.428849 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" podStartSLOduration=1.666979033 podStartE2EDuration="2.428829715s" podCreationTimestamp="2025-11-26 11:44:47 +0000 UTC" firstStartedPulling="2025-11-26 11:44:48.257869893 +0000 UTC m=+2047.849081415" lastFinishedPulling="2025-11-26 11:44:49.019720575 +0000 UTC m=+2048.610932097" observedRunningTime="2025-11-26 11:44:49.427688162 +0000 UTC m=+2049.018899684" watchObservedRunningTime="2025-11-26 11:44:49.428829715 +0000 UTC m=+2049.020041236" Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.132306 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9"] Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.134439 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.137386 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.137783 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.144575 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9"] Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.187212 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ef9579a5-fa27-4e8c-9579-ebf573b3578e-config-volume\") pod \"collect-profiles-29402625-bthp9\" (UID: \"ef9579a5-fa27-4e8c-9579-ebf573b3578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.187311 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6d2wk\" (UniqueName: \"kubernetes.io/projected/ef9579a5-fa27-4e8c-9579-ebf573b3578e-kube-api-access-6d2wk\") pod \"collect-profiles-29402625-bthp9\" (UID: \"ef9579a5-fa27-4e8c-9579-ebf573b3578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.187381 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ef9579a5-fa27-4e8c-9579-ebf573b3578e-secret-volume\") pod \"collect-profiles-29402625-bthp9\" (UID: \"ef9579a5-fa27-4e8c-9579-ebf573b3578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.288633 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ef9579a5-fa27-4e8c-9579-ebf573b3578e-config-volume\") pod \"collect-profiles-29402625-bthp9\" (UID: \"ef9579a5-fa27-4e8c-9579-ebf573b3578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.288851 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6d2wk\" (UniqueName: \"kubernetes.io/projected/ef9579a5-fa27-4e8c-9579-ebf573b3578e-kube-api-access-6d2wk\") pod \"collect-profiles-29402625-bthp9\" (UID: \"ef9579a5-fa27-4e8c-9579-ebf573b3578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.288958 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ef9579a5-fa27-4e8c-9579-ebf573b3578e-secret-volume\") pod \"collect-profiles-29402625-bthp9\" (UID: \"ef9579a5-fa27-4e8c-9579-ebf573b3578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.289624 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ef9579a5-fa27-4e8c-9579-ebf573b3578e-config-volume\") pod 
\"collect-profiles-29402625-bthp9\" (UID: \"ef9579a5-fa27-4e8c-9579-ebf573b3578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.295490 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ef9579a5-fa27-4e8c-9579-ebf573b3578e-secret-volume\") pod \"collect-profiles-29402625-bthp9\" (UID: \"ef9579a5-fa27-4e8c-9579-ebf573b3578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.302884 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6d2wk\" (UniqueName: \"kubernetes.io/projected/ef9579a5-fa27-4e8c-9579-ebf573b3578e-kube-api-access-6d2wk\") pod \"collect-profiles-29402625-bthp9\" (UID: \"ef9579a5-fa27-4e8c-9579-ebf573b3578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.473803 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" Nov 26 11:45:00 crc kubenswrapper[4622]: I1126 11:45:00.881679 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9"] Nov 26 11:45:01 crc kubenswrapper[4622]: I1126 11:45:01.505413 4622 generic.go:334] "Generic (PLEG): container finished" podID="ef9579a5-fa27-4e8c-9579-ebf573b3578e" containerID="fbf93b6f59cddcab6cb0588b0fa6dbe7011c7981b8cab0af5ef23effdaca0b78" exitCode=0 Nov 26 11:45:01 crc kubenswrapper[4622]: I1126 11:45:01.505581 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" event={"ID":"ef9579a5-fa27-4e8c-9579-ebf573b3578e","Type":"ContainerDied","Data":"fbf93b6f59cddcab6cb0588b0fa6dbe7011c7981b8cab0af5ef23effdaca0b78"} Nov 26 11:45:01 crc kubenswrapper[4622]: I1126 11:45:01.505875 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" event={"ID":"ef9579a5-fa27-4e8c-9579-ebf573b3578e","Type":"ContainerStarted","Data":"bbd8c876fd42ff9789072f7af9c902eca78102c810a2784d963c1d7b2a4ba8d9"} Nov 26 11:45:02 crc kubenswrapper[4622]: I1126 11:45:02.793055 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" Nov 26 11:45:02 crc kubenswrapper[4622]: I1126 11:45:02.945008 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6d2wk\" (UniqueName: \"kubernetes.io/projected/ef9579a5-fa27-4e8c-9579-ebf573b3578e-kube-api-access-6d2wk\") pod \"ef9579a5-fa27-4e8c-9579-ebf573b3578e\" (UID: \"ef9579a5-fa27-4e8c-9579-ebf573b3578e\") " Nov 26 11:45:02 crc kubenswrapper[4622]: I1126 11:45:02.945441 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ef9579a5-fa27-4e8c-9579-ebf573b3578e-config-volume\") pod \"ef9579a5-fa27-4e8c-9579-ebf573b3578e\" (UID: \"ef9579a5-fa27-4e8c-9579-ebf573b3578e\") " Nov 26 11:45:02 crc kubenswrapper[4622]: I1126 11:45:02.945470 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ef9579a5-fa27-4e8c-9579-ebf573b3578e-secret-volume\") pod \"ef9579a5-fa27-4e8c-9579-ebf573b3578e\" (UID: \"ef9579a5-fa27-4e8c-9579-ebf573b3578e\") " Nov 26 11:45:02 crc kubenswrapper[4622]: I1126 11:45:02.946388 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef9579a5-fa27-4e8c-9579-ebf573b3578e-config-volume" (OuterVolumeSpecName: "config-volume") pod "ef9579a5-fa27-4e8c-9579-ebf573b3578e" (UID: "ef9579a5-fa27-4e8c-9579-ebf573b3578e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:45:02 crc kubenswrapper[4622]: I1126 11:45:02.952656 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef9579a5-fa27-4e8c-9579-ebf573b3578e-kube-api-access-6d2wk" (OuterVolumeSpecName: "kube-api-access-6d2wk") pod "ef9579a5-fa27-4e8c-9579-ebf573b3578e" (UID: "ef9579a5-fa27-4e8c-9579-ebf573b3578e"). InnerVolumeSpecName "kube-api-access-6d2wk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:45:02 crc kubenswrapper[4622]: I1126 11:45:02.953086 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef9579a5-fa27-4e8c-9579-ebf573b3578e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ef9579a5-fa27-4e8c-9579-ebf573b3578e" (UID: "ef9579a5-fa27-4e8c-9579-ebf573b3578e"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:45:03 crc kubenswrapper[4622]: I1126 11:45:03.047556 4622 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ef9579a5-fa27-4e8c-9579-ebf573b3578e-config-volume\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:03 crc kubenswrapper[4622]: I1126 11:45:03.047587 4622 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ef9579a5-fa27-4e8c-9579-ebf573b3578e-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:03 crc kubenswrapper[4622]: I1126 11:45:03.047600 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6d2wk\" (UniqueName: \"kubernetes.io/projected/ef9579a5-fa27-4e8c-9579-ebf573b3578e-kube-api-access-6d2wk\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:03 crc kubenswrapper[4622]: I1126 11:45:03.526325 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" event={"ID":"ef9579a5-fa27-4e8c-9579-ebf573b3578e","Type":"ContainerDied","Data":"bbd8c876fd42ff9789072f7af9c902eca78102c810a2784d963c1d7b2a4ba8d9"} Nov 26 11:45:03 crc kubenswrapper[4622]: I1126 11:45:03.526383 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bbd8c876fd42ff9789072f7af9c902eca78102c810a2784d963c1d7b2a4ba8d9" Nov 26 11:45:03 crc kubenswrapper[4622]: I1126 11:45:03.526431 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402625-bthp9" Nov 26 11:45:03 crc kubenswrapper[4622]: I1126 11:45:03.852723 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz"] Nov 26 11:45:03 crc kubenswrapper[4622]: I1126 11:45:03.858693 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402580-2wnqz"] Nov 26 11:45:04 crc kubenswrapper[4622]: I1126 11:45:04.716907 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3db268a7-30fd-4031-8eba-72d60056bbbd" path="/var/lib/kubelet/pods/3db268a7-30fd-4031-8eba-72d60056bbbd/volumes" Nov 26 11:45:11 crc kubenswrapper[4622]: I1126 11:45:11.587740 4622 generic.go:334] "Generic (PLEG): container finished" podID="f5e33fbe-58e7-46b2-9202-b7c646631fda" containerID="48fd19101c1b0d414ca2e74dd7d43e70b4ab45619bc42829b769b15ca0f0f1f8" exitCode=0 Nov 26 11:45:11 crc kubenswrapper[4622]: I1126 11:45:11.587900 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" event={"ID":"f5e33fbe-58e7-46b2-9202-b7c646631fda","Type":"ContainerDied","Data":"48fd19101c1b0d414ca2e74dd7d43e70b4ab45619bc42829b769b15ca0f0f1f8"} Nov 26 11:45:12 crc kubenswrapper[4622]: I1126 11:45:12.906520 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.051323 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-ovn-default-certs-0\") pod \"f5e33fbe-58e7-46b2-9202-b7c646631fda\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.051889 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-repo-setup-combined-ca-bundle\") pod \"f5e33fbe-58e7-46b2-9202-b7c646631fda\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.052067 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxjgm\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-kube-api-access-hxjgm\") pod \"f5e33fbe-58e7-46b2-9202-b7c646631fda\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.052185 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-inventory\") pod \"f5e33fbe-58e7-46b2-9202-b7c646631fda\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.052408 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ovn-combined-ca-bundle\") pod \"f5e33fbe-58e7-46b2-9202-b7c646631fda\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.052521 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ceph\") pod \"f5e33fbe-58e7-46b2-9202-b7c646631fda\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.052627 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ssh-key\") pod \"f5e33fbe-58e7-46b2-9202-b7c646631fda\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.052749 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"f5e33fbe-58e7-46b2-9202-b7c646631fda\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.052880 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-neutron-metadata-combined-ca-bundle\") pod \"f5e33fbe-58e7-46b2-9202-b7c646631fda\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.052969 4622 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-bootstrap-combined-ca-bundle\") pod \"f5e33fbe-58e7-46b2-9202-b7c646631fda\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.053062 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-libvirt-combined-ca-bundle\") pod \"f5e33fbe-58e7-46b2-9202-b7c646631fda\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.053139 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-nova-combined-ca-bundle\") pod \"f5e33fbe-58e7-46b2-9202-b7c646631fda\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.053221 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"f5e33fbe-58e7-46b2-9202-b7c646631fda\" (UID: \"f5e33fbe-58e7-46b2-9202-b7c646631fda\") " Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.058768 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "f5e33fbe-58e7-46b2-9202-b7c646631fda" (UID: "f5e33fbe-58e7-46b2-9202-b7c646631fda"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.060084 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "f5e33fbe-58e7-46b2-9202-b7c646631fda" (UID: "f5e33fbe-58e7-46b2-9202-b7c646631fda"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.060246 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "f5e33fbe-58e7-46b2-9202-b7c646631fda" (UID: "f5e33fbe-58e7-46b2-9202-b7c646631fda"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.060476 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "f5e33fbe-58e7-46b2-9202-b7c646631fda" (UID: "f5e33fbe-58e7-46b2-9202-b7c646631fda"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.060554 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "f5e33fbe-58e7-46b2-9202-b7c646631fda" (UID: "f5e33fbe-58e7-46b2-9202-b7c646631fda"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.061013 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-kube-api-access-hxjgm" (OuterVolumeSpecName: "kube-api-access-hxjgm") pod "f5e33fbe-58e7-46b2-9202-b7c646631fda" (UID: "f5e33fbe-58e7-46b2-9202-b7c646631fda"). InnerVolumeSpecName "kube-api-access-hxjgm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.061078 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "f5e33fbe-58e7-46b2-9202-b7c646631fda" (UID: "f5e33fbe-58e7-46b2-9202-b7c646631fda"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.061475 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "f5e33fbe-58e7-46b2-9202-b7c646631fda" (UID: "f5e33fbe-58e7-46b2-9202-b7c646631fda"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.061735 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "f5e33fbe-58e7-46b2-9202-b7c646631fda" (UID: "f5e33fbe-58e7-46b2-9202-b7c646631fda"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.061766 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ceph" (OuterVolumeSpecName: "ceph") pod "f5e33fbe-58e7-46b2-9202-b7c646631fda" (UID: "f5e33fbe-58e7-46b2-9202-b7c646631fda"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.063129 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "f5e33fbe-58e7-46b2-9202-b7c646631fda" (UID: "f5e33fbe-58e7-46b2-9202-b7c646631fda"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.079031 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f5e33fbe-58e7-46b2-9202-b7c646631fda" (UID: "f5e33fbe-58e7-46b2-9202-b7c646631fda"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.079970 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-inventory" (OuterVolumeSpecName: "inventory") pod "f5e33fbe-58e7-46b2-9202-b7c646631fda" (UID: "f5e33fbe-58e7-46b2-9202-b7c646631fda"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.157248 4622 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.157330 4622 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.157345 4622 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.157360 4622 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.157381 4622 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.157394 4622 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.157406 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxjgm\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-kube-api-access-hxjgm\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.157419 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.157432 4622 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:13 crc 
kubenswrapper[4622]: I1126 11:45:13.157442 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.157450 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.157461 4622 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f5e33fbe-58e7-46b2-9202-b7c646631fda-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.157474 4622 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e33fbe-58e7-46b2-9202-b7c646631fda-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.603789 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" event={"ID":"f5e33fbe-58e7-46b2-9202-b7c646631fda","Type":"ContainerDied","Data":"324cf4877a0940506551add20725fdced5e2f0aacd24c18bafe2b9ba04cd9280"} Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.603831 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="324cf4877a0940506551add20725fdced5e2f0aacd24c18bafe2b9ba04cd9280" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.603850 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-ss25k" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.664381 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4"] Nov 26 11:45:13 crc kubenswrapper[4622]: E1126 11:45:13.664963 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef9579a5-fa27-4e8c-9579-ebf573b3578e" containerName="collect-profiles" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.664980 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef9579a5-fa27-4e8c-9579-ebf573b3578e" containerName="collect-profiles" Nov 26 11:45:13 crc kubenswrapper[4622]: E1126 11:45:13.665005 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5e33fbe-58e7-46b2-9202-b7c646631fda" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.665013 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5e33fbe-58e7-46b2-9202-b7c646631fda" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.665209 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef9579a5-fa27-4e8c-9579-ebf573b3578e" containerName="collect-profiles" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.665225 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5e33fbe-58e7-46b2-9202-b7c646631fda" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.666071 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.672833 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4"] Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.673629 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.674087 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.674812 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.677131 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.677452 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.767135 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.767206 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.767404 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.767529 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dcvs\" (UniqueName: \"kubernetes.io/projected/c780381a-c3ad-4538-a33f-b1d7611667ef-kube-api-access-7dcvs\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.869831 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.869901 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.870025 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.870091 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dcvs\" (UniqueName: \"kubernetes.io/projected/c780381a-c3ad-4538-a33f-b1d7611667ef-kube-api-access-7dcvs\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.873132 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.873244 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.873415 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.884664 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dcvs\" (UniqueName: \"kubernetes.io/projected/c780381a-c3ad-4538-a33f-b1d7611667ef-kube-api-access-7dcvs\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:13 crc kubenswrapper[4622]: I1126 11:45:13.981038 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:14 crc kubenswrapper[4622]: I1126 11:45:14.445300 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4"] Nov 26 11:45:14 crc kubenswrapper[4622]: I1126 11:45:14.611660 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" event={"ID":"c780381a-c3ad-4538-a33f-b1d7611667ef","Type":"ContainerStarted","Data":"12a33ef41fd093d6e116ac76a3abfb4f8582e601721b48620735542c588a6be7"} Nov 26 11:45:15 crc kubenswrapper[4622]: I1126 11:45:15.621137 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" event={"ID":"c780381a-c3ad-4538-a33f-b1d7611667ef","Type":"ContainerStarted","Data":"2df9d343160521c9331fb0320c184ab5cc92fddd3e5e7ef9cbc20cb134a094c9"} Nov 26 11:45:19 crc kubenswrapper[4622]: I1126 11:45:19.654771 4622 generic.go:334] "Generic (PLEG): container finished" podID="c780381a-c3ad-4538-a33f-b1d7611667ef" containerID="2df9d343160521c9331fb0320c184ab5cc92fddd3e5e7ef9cbc20cb134a094c9" exitCode=0 Nov 26 11:45:19 crc kubenswrapper[4622]: I1126 11:45:19.654851 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" event={"ID":"c780381a-c3ad-4538-a33f-b1d7611667ef","Type":"ContainerDied","Data":"2df9d343160521c9331fb0320c184ab5cc92fddd3e5e7ef9cbc20cb134a094c9"} Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.005766 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.013559 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-inventory\") pod \"c780381a-c3ad-4538-a33f-b1d7611667ef\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.013672 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-ssh-key\") pod \"c780381a-c3ad-4538-a33f-b1d7611667ef\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.014090 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-ceph\") pod \"c780381a-c3ad-4538-a33f-b1d7611667ef\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.014188 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dcvs\" (UniqueName: \"kubernetes.io/projected/c780381a-c3ad-4538-a33f-b1d7611667ef-kube-api-access-7dcvs\") pod \"c780381a-c3ad-4538-a33f-b1d7611667ef\" (UID: \"c780381a-c3ad-4538-a33f-b1d7611667ef\") " Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.024444 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-ceph" (OuterVolumeSpecName: "ceph") pod "c780381a-c3ad-4538-a33f-b1d7611667ef" (UID: "c780381a-c3ad-4538-a33f-b1d7611667ef"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.026418 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c780381a-c3ad-4538-a33f-b1d7611667ef-kube-api-access-7dcvs" (OuterVolumeSpecName: "kube-api-access-7dcvs") pod "c780381a-c3ad-4538-a33f-b1d7611667ef" (UID: "c780381a-c3ad-4538-a33f-b1d7611667ef"). InnerVolumeSpecName "kube-api-access-7dcvs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.041573 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-inventory" (OuterVolumeSpecName: "inventory") pod "c780381a-c3ad-4538-a33f-b1d7611667ef" (UID: "c780381a-c3ad-4538-a33f-b1d7611667ef"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.042761 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c780381a-c3ad-4538-a33f-b1d7611667ef" (UID: "c780381a-c3ad-4538-a33f-b1d7611667ef"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.116585 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.116611 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.116623 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dcvs\" (UniqueName: \"kubernetes.io/projected/c780381a-c3ad-4538-a33f-b1d7611667ef-kube-api-access-7dcvs\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.116634 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c780381a-c3ad-4538-a33f-b1d7611667ef-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.674969 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" event={"ID":"c780381a-c3ad-4538-a33f-b1d7611667ef","Type":"ContainerDied","Data":"12a33ef41fd093d6e116ac76a3abfb4f8582e601721b48620735542c588a6be7"} Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.675024 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12a33ef41fd093d6e116ac76a3abfb4f8582e601721b48620735542c588a6be7" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.675048 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.759704 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn"] Nov 26 11:45:21 crc kubenswrapper[4622]: E1126 11:45:21.760140 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c780381a-c3ad-4538-a33f-b1d7611667ef" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.760161 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="c780381a-c3ad-4538-a33f-b1d7611667ef" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.760359 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="c780381a-c3ad-4538-a33f-b1d7611667ef" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.761003 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.765894 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.765921 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.766064 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.766185 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.766205 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.766604 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.766977 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn"] Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.834094 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.834175 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh55r\" (UniqueName: \"kubernetes.io/projected/0ea4999e-068f-4516-b986-41a81f868f48-kube-api-access-sh55r\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.834410 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ssh-key\") pod 
\"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.834669 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.834716 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0ea4999e-068f-4516-b986-41a81f868f48-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.834922 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.937435 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.937472 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0ea4999e-068f-4516-b986-41a81f868f48-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.937543 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.937613 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.937636 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh55r\" (UniqueName: \"kubernetes.io/projected/0ea4999e-068f-4516-b986-41a81f868f48-kube-api-access-sh55r\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: 
\"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.937672 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.938705 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0ea4999e-068f-4516-b986-41a81f868f48-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.942733 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.942966 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.943227 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.943721 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:21 crc kubenswrapper[4622]: I1126 11:45:21.955456 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh55r\" (UniqueName: \"kubernetes.io/projected/0ea4999e-068f-4516-b986-41a81f868f48-kube-api-access-sh55r\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8rkn\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:22 crc kubenswrapper[4622]: I1126 11:45:22.080431 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:45:22 crc kubenswrapper[4622]: I1126 11:45:22.539448 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn"] Nov 26 11:45:22 crc kubenswrapper[4622]: I1126 11:45:22.686714 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" event={"ID":"0ea4999e-068f-4516-b986-41a81f868f48","Type":"ContainerStarted","Data":"790290b032e810e2840a9191df76b71ce3a72bbbf4d3eef08d7df95b482967a6"} Nov 26 11:45:23 crc kubenswrapper[4622]: I1126 11:45:23.697579 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" event={"ID":"0ea4999e-068f-4516-b986-41a81f868f48","Type":"ContainerStarted","Data":"5296ee3801ee9d853b3b37051885b1db56b4cc64a6e353020a5960493242f4e9"} Nov 26 11:45:23 crc kubenswrapper[4622]: I1126 11:45:23.726738 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" podStartSLOduration=1.9607152719999998 podStartE2EDuration="2.726665295s" podCreationTimestamp="2025-11-26 11:45:21 +0000 UTC" firstStartedPulling="2025-11-26 11:45:22.544376119 +0000 UTC m=+2082.135587641" lastFinishedPulling="2025-11-26 11:45:23.310326142 +0000 UTC m=+2082.901537664" observedRunningTime="2025-11-26 11:45:23.723191113 +0000 UTC m=+2083.314402635" watchObservedRunningTime="2025-11-26 11:45:23.726665295 +0000 UTC m=+2083.317876817" Nov 26 11:45:45 crc kubenswrapper[4622]: I1126 11:45:45.178078 4622 scope.go:117] "RemoveContainer" containerID="29f854cf4a79e1554349fbd2049c45246cbbf0e1e92a7178c7233ed0237edd30" Nov 26 11:45:45 crc kubenswrapper[4622]: I1126 11:45:45.198637 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:45:45 crc kubenswrapper[4622]: I1126 11:45:45.198711 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:46:10 crc kubenswrapper[4622]: I1126 11:46:10.818155 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mj78m"] Nov 26 11:46:10 crc kubenswrapper[4622]: I1126 11:46:10.821413 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:10 crc kubenswrapper[4622]: I1126 11:46:10.833062 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mj78m"] Nov 26 11:46:10 crc kubenswrapper[4622]: I1126 11:46:10.984855 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jsg5\" (UniqueName: \"kubernetes.io/projected/bd2af36e-4117-4586-9afa-523b261f4e14-kube-api-access-6jsg5\") pod \"redhat-marketplace-mj78m\" (UID: \"bd2af36e-4117-4586-9afa-523b261f4e14\") " pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:10 crc kubenswrapper[4622]: I1126 11:46:10.985172 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd2af36e-4117-4586-9afa-523b261f4e14-catalog-content\") pod \"redhat-marketplace-mj78m\" (UID: \"bd2af36e-4117-4586-9afa-523b261f4e14\") " pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:10 crc kubenswrapper[4622]: I1126 11:46:10.985264 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd2af36e-4117-4586-9afa-523b261f4e14-utilities\") pod \"redhat-marketplace-mj78m\" (UID: \"bd2af36e-4117-4586-9afa-523b261f4e14\") " pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:11 crc kubenswrapper[4622]: I1126 11:46:11.090610 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jsg5\" (UniqueName: \"kubernetes.io/projected/bd2af36e-4117-4586-9afa-523b261f4e14-kube-api-access-6jsg5\") pod \"redhat-marketplace-mj78m\" (UID: \"bd2af36e-4117-4586-9afa-523b261f4e14\") " pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:11 crc kubenswrapper[4622]: I1126 11:46:11.090678 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd2af36e-4117-4586-9afa-523b261f4e14-catalog-content\") pod \"redhat-marketplace-mj78m\" (UID: \"bd2af36e-4117-4586-9afa-523b261f4e14\") " pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:11 crc kubenswrapper[4622]: I1126 11:46:11.090815 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd2af36e-4117-4586-9afa-523b261f4e14-utilities\") pod \"redhat-marketplace-mj78m\" (UID: \"bd2af36e-4117-4586-9afa-523b261f4e14\") " pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:11 crc kubenswrapper[4622]: I1126 11:46:11.094389 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd2af36e-4117-4586-9afa-523b261f4e14-utilities\") pod \"redhat-marketplace-mj78m\" (UID: \"bd2af36e-4117-4586-9afa-523b261f4e14\") " pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:11 crc kubenswrapper[4622]: I1126 11:46:11.096597 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd2af36e-4117-4586-9afa-523b261f4e14-catalog-content\") pod \"redhat-marketplace-mj78m\" (UID: \"bd2af36e-4117-4586-9afa-523b261f4e14\") " pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:11 crc kubenswrapper[4622]: I1126 11:46:11.114717 4622 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-6jsg5\" (UniqueName: \"kubernetes.io/projected/bd2af36e-4117-4586-9afa-523b261f4e14-kube-api-access-6jsg5\") pod \"redhat-marketplace-mj78m\" (UID: \"bd2af36e-4117-4586-9afa-523b261f4e14\") " pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:11 crc kubenswrapper[4622]: I1126 11:46:11.147964 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:11 crc kubenswrapper[4622]: I1126 11:46:11.569391 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mj78m"] Nov 26 11:46:12 crc kubenswrapper[4622]: I1126 11:46:12.098837 4622 generic.go:334] "Generic (PLEG): container finished" podID="bd2af36e-4117-4586-9afa-523b261f4e14" containerID="ccb2561e56bba147241312374071e5fc1e7ee51a2dd8690e158e417612a92356" exitCode=0 Nov 26 11:46:12 crc kubenswrapper[4622]: I1126 11:46:12.098926 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mj78m" event={"ID":"bd2af36e-4117-4586-9afa-523b261f4e14","Type":"ContainerDied","Data":"ccb2561e56bba147241312374071e5fc1e7ee51a2dd8690e158e417612a92356"} Nov 26 11:46:12 crc kubenswrapper[4622]: I1126 11:46:12.099094 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mj78m" event={"ID":"bd2af36e-4117-4586-9afa-523b261f4e14","Type":"ContainerStarted","Data":"a0239d6be0cd03d81e73c6dbb7a5c2862afd4c1787901269046b4ec228e1ca65"} Nov 26 11:46:12 crc kubenswrapper[4622]: I1126 11:46:12.100255 4622 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 26 11:46:14 crc kubenswrapper[4622]: I1126 11:46:14.115450 4622 generic.go:334] "Generic (PLEG): container finished" podID="0ea4999e-068f-4516-b986-41a81f868f48" containerID="5296ee3801ee9d853b3b37051885b1db56b4cc64a6e353020a5960493242f4e9" exitCode=0 Nov 26 11:46:14 crc kubenswrapper[4622]: I1126 11:46:14.115538 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" event={"ID":"0ea4999e-068f-4516-b986-41a81f868f48","Type":"ContainerDied","Data":"5296ee3801ee9d853b3b37051885b1db56b4cc64a6e353020a5960493242f4e9"} Nov 26 11:46:14 crc kubenswrapper[4622]: I1126 11:46:14.118199 4622 generic.go:334] "Generic (PLEG): container finished" podID="bd2af36e-4117-4586-9afa-523b261f4e14" containerID="6d81ed2f6c09cb1f96781dbad26f88b5d2e3fb4c63e648efb1d82ff3619e57db" exitCode=0 Nov 26 11:46:14 crc kubenswrapper[4622]: I1126 11:46:14.118225 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mj78m" event={"ID":"bd2af36e-4117-4586-9afa-523b261f4e14","Type":"ContainerDied","Data":"6d81ed2f6c09cb1f96781dbad26f88b5d2e3fb4c63e648efb1d82ff3619e57db"} Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.126835 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mj78m" event={"ID":"bd2af36e-4117-4586-9afa-523b261f4e14","Type":"ContainerStarted","Data":"d0e9c2c029759db3cb1c9cef40fbe66f0a5a74701f35ba12f21c70124e0de9ce"} Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.198952 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 
26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.199288 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.493617 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.510739 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mj78m" podStartSLOduration=2.920783644 podStartE2EDuration="5.510721394s" podCreationTimestamp="2025-11-26 11:46:10 +0000 UTC" firstStartedPulling="2025-11-26 11:46:12.100021668 +0000 UTC m=+2131.691233190" lastFinishedPulling="2025-11-26 11:46:14.689959428 +0000 UTC m=+2134.281170940" observedRunningTime="2025-11-26 11:46:15.150442668 +0000 UTC m=+2134.741654190" watchObservedRunningTime="2025-11-26 11:46:15.510721394 +0000 UTC m=+2135.101932916" Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.681419 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-inventory\") pod \"0ea4999e-068f-4516-b986-41a81f868f48\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.681511 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ovn-combined-ca-bundle\") pod \"0ea4999e-068f-4516-b986-41a81f868f48\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.681589 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sh55r\" (UniqueName: \"kubernetes.io/projected/0ea4999e-068f-4516-b986-41a81f868f48-kube-api-access-sh55r\") pod \"0ea4999e-068f-4516-b986-41a81f868f48\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.681618 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ssh-key\") pod \"0ea4999e-068f-4516-b986-41a81f868f48\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.681660 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ceph\") pod \"0ea4999e-068f-4516-b986-41a81f868f48\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.681687 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0ea4999e-068f-4516-b986-41a81f868f48-ovncontroller-config-0\") pod \"0ea4999e-068f-4516-b986-41a81f868f48\" (UID: \"0ea4999e-068f-4516-b986-41a81f868f48\") " Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.686788 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ceph" 
(OuterVolumeSpecName: "ceph") pod "0ea4999e-068f-4516-b986-41a81f868f48" (UID: "0ea4999e-068f-4516-b986-41a81f868f48"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.687551 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "0ea4999e-068f-4516-b986-41a81f868f48" (UID: "0ea4999e-068f-4516-b986-41a81f868f48"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.692984 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ea4999e-068f-4516-b986-41a81f868f48-kube-api-access-sh55r" (OuterVolumeSpecName: "kube-api-access-sh55r") pod "0ea4999e-068f-4516-b986-41a81f868f48" (UID: "0ea4999e-068f-4516-b986-41a81f868f48"). InnerVolumeSpecName "kube-api-access-sh55r". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.703056 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ea4999e-068f-4516-b986-41a81f868f48-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "0ea4999e-068f-4516-b986-41a81f868f48" (UID: "0ea4999e-068f-4516-b986-41a81f868f48"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.704037 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0ea4999e-068f-4516-b986-41a81f868f48" (UID: "0ea4999e-068f-4516-b986-41a81f868f48"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.708452 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-inventory" (OuterVolumeSpecName: "inventory") pod "0ea4999e-068f-4516-b986-41a81f868f48" (UID: "0ea4999e-068f-4516-b986-41a81f868f48"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.785242 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.785277 4622 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.785292 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sh55r\" (UniqueName: \"kubernetes.io/projected/0ea4999e-068f-4516-b986-41a81f868f48-kube-api-access-sh55r\") on node \"crc\" DevicePath \"\"" Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.785301 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.785311 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0ea4999e-068f-4516-b986-41a81f868f48-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:46:15 crc kubenswrapper[4622]: I1126 11:46:15.785320 4622 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0ea4999e-068f-4516-b986-41a81f868f48-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.136765 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.136782 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8rkn" event={"ID":"0ea4999e-068f-4516-b986-41a81f868f48","Type":"ContainerDied","Data":"790290b032e810e2840a9191df76b71ce3a72bbbf4d3eef08d7df95b482967a6"} Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.137086 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="790290b032e810e2840a9191df76b71ce3a72bbbf4d3eef08d7df95b482967a6" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.197896 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8"] Nov 26 11:46:16 crc kubenswrapper[4622]: E1126 11:46:16.198240 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ea4999e-068f-4516-b986-41a81f868f48" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.198257 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ea4999e-068f-4516-b986-41a81f868f48" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.198450 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ea4999e-068f-4516-b986-41a81f868f48" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.199095 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.204254 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.204308 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.204318 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.204327 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.204382 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.204265 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.205670 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.209176 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8"] Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.293637 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.293694 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.293826 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.293983 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.294022 4622 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4gvd\" (UniqueName: \"kubernetes.io/projected/2b278dc2-9da9-470a-93b5-4918011b54a0-kube-api-access-j4gvd\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.294055 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.294094 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.395611 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.395669 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.395760 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.395854 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.395884 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4gvd\" (UniqueName: \"kubernetes.io/projected/2b278dc2-9da9-470a-93b5-4918011b54a0-kube-api-access-j4gvd\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: 
\"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.395913 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.395942 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.399965 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.400151 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.400467 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.400617 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.402075 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.403190 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.410366 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4gvd\" (UniqueName: \"kubernetes.io/projected/2b278dc2-9da9-470a-93b5-4918011b54a0-kube-api-access-j4gvd\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.512902 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" Nov 26 11:46:16 crc kubenswrapper[4622]: I1126 11:46:16.944438 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8"] Nov 26 11:46:17 crc kubenswrapper[4622]: I1126 11:46:17.145081 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" event={"ID":"2b278dc2-9da9-470a-93b5-4918011b54a0","Type":"ContainerStarted","Data":"fb620dc00ada1d86fbaadffb8c064d5ec8a7561256798da39612008b07221429"} Nov 26 11:46:18 crc kubenswrapper[4622]: I1126 11:46:18.152702 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" event={"ID":"2b278dc2-9da9-470a-93b5-4918011b54a0","Type":"ContainerStarted","Data":"48991a3dadde52d54dfbc92b6d474a0247d541c4a4921dceaf6e180502fad01f"} Nov 26 11:46:18 crc kubenswrapper[4622]: I1126 11:46:18.168793 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" podStartSLOduration=1.227676089 podStartE2EDuration="2.168779883s" podCreationTimestamp="2025-11-26 11:46:16 +0000 UTC" firstStartedPulling="2025-11-26 11:46:16.949952044 +0000 UTC m=+2136.541163566" lastFinishedPulling="2025-11-26 11:46:17.891055838 +0000 UTC m=+2137.482267360" observedRunningTime="2025-11-26 11:46:18.164980549 +0000 UTC m=+2137.756192071" watchObservedRunningTime="2025-11-26 11:46:18.168779883 +0000 UTC m=+2137.759991405" Nov 26 11:46:21 crc kubenswrapper[4622]: I1126 11:46:21.148489 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:21 crc kubenswrapper[4622]: I1126 11:46:21.148777 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:21 crc kubenswrapper[4622]: I1126 11:46:21.183654 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:21 crc kubenswrapper[4622]: I1126 11:46:21.218963 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:21 crc kubenswrapper[4622]: I1126 11:46:21.412788 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mj78m"] Nov 26 11:46:23 crc kubenswrapper[4622]: I1126 11:46:23.189323 4622 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-marketplace-mj78m" podUID="bd2af36e-4117-4586-9afa-523b261f4e14" containerName="registry-server" containerID="cri-o://d0e9c2c029759db3cb1c9cef40fbe66f0a5a74701f35ba12f21c70124e0de9ce" gracePeriod=2 Nov 26 11:46:23 crc kubenswrapper[4622]: I1126 11:46:23.597806 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:23 crc kubenswrapper[4622]: I1126 11:46:23.621759 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jsg5\" (UniqueName: \"kubernetes.io/projected/bd2af36e-4117-4586-9afa-523b261f4e14-kube-api-access-6jsg5\") pod \"bd2af36e-4117-4586-9afa-523b261f4e14\" (UID: \"bd2af36e-4117-4586-9afa-523b261f4e14\") " Nov 26 11:46:23 crc kubenswrapper[4622]: I1126 11:46:23.621826 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd2af36e-4117-4586-9afa-523b261f4e14-utilities\") pod \"bd2af36e-4117-4586-9afa-523b261f4e14\" (UID: \"bd2af36e-4117-4586-9afa-523b261f4e14\") " Nov 26 11:46:23 crc kubenswrapper[4622]: I1126 11:46:23.621937 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd2af36e-4117-4586-9afa-523b261f4e14-catalog-content\") pod \"bd2af36e-4117-4586-9afa-523b261f4e14\" (UID: \"bd2af36e-4117-4586-9afa-523b261f4e14\") " Nov 26 11:46:23 crc kubenswrapper[4622]: I1126 11:46:23.622631 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd2af36e-4117-4586-9afa-523b261f4e14-utilities" (OuterVolumeSpecName: "utilities") pod "bd2af36e-4117-4586-9afa-523b261f4e14" (UID: "bd2af36e-4117-4586-9afa-523b261f4e14"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:46:23 crc kubenswrapper[4622]: I1126 11:46:23.622823 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd2af36e-4117-4586-9afa-523b261f4e14-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:46:23 crc kubenswrapper[4622]: I1126 11:46:23.629660 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd2af36e-4117-4586-9afa-523b261f4e14-kube-api-access-6jsg5" (OuterVolumeSpecName: "kube-api-access-6jsg5") pod "bd2af36e-4117-4586-9afa-523b261f4e14" (UID: "bd2af36e-4117-4586-9afa-523b261f4e14"). InnerVolumeSpecName "kube-api-access-6jsg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:46:23 crc kubenswrapper[4622]: I1126 11:46:23.648653 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd2af36e-4117-4586-9afa-523b261f4e14-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd2af36e-4117-4586-9afa-523b261f4e14" (UID: "bd2af36e-4117-4586-9afa-523b261f4e14"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:46:23 crc kubenswrapper[4622]: I1126 11:46:23.726193 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jsg5\" (UniqueName: \"kubernetes.io/projected/bd2af36e-4117-4586-9afa-523b261f4e14-kube-api-access-6jsg5\") on node \"crc\" DevicePath \"\"" Nov 26 11:46:23 crc kubenswrapper[4622]: I1126 11:46:23.726244 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd2af36e-4117-4586-9afa-523b261f4e14-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.200255 4622 generic.go:334] "Generic (PLEG): container finished" podID="bd2af36e-4117-4586-9afa-523b261f4e14" containerID="d0e9c2c029759db3cb1c9cef40fbe66f0a5a74701f35ba12f21c70124e0de9ce" exitCode=0 Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.200345 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mj78m" Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.200364 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mj78m" event={"ID":"bd2af36e-4117-4586-9afa-523b261f4e14","Type":"ContainerDied","Data":"d0e9c2c029759db3cb1c9cef40fbe66f0a5a74701f35ba12f21c70124e0de9ce"} Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.200794 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mj78m" event={"ID":"bd2af36e-4117-4586-9afa-523b261f4e14","Type":"ContainerDied","Data":"a0239d6be0cd03d81e73c6dbb7a5c2862afd4c1787901269046b4ec228e1ca65"} Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.200836 4622 scope.go:117] "RemoveContainer" containerID="d0e9c2c029759db3cb1c9cef40fbe66f0a5a74701f35ba12f21c70124e0de9ce" Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.226272 4622 scope.go:117] "RemoveContainer" containerID="6d81ed2f6c09cb1f96781dbad26f88b5d2e3fb4c63e648efb1d82ff3619e57db" Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.231671 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mj78m"] Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.238021 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mj78m"] Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.259241 4622 scope.go:117] "RemoveContainer" containerID="ccb2561e56bba147241312374071e5fc1e7ee51a2dd8690e158e417612a92356" Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.282232 4622 scope.go:117] "RemoveContainer" containerID="d0e9c2c029759db3cb1c9cef40fbe66f0a5a74701f35ba12f21c70124e0de9ce" Nov 26 11:46:24 crc kubenswrapper[4622]: E1126 11:46:24.282631 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0e9c2c029759db3cb1c9cef40fbe66f0a5a74701f35ba12f21c70124e0de9ce\": container with ID starting with d0e9c2c029759db3cb1c9cef40fbe66f0a5a74701f35ba12f21c70124e0de9ce not found: ID does not exist" containerID="d0e9c2c029759db3cb1c9cef40fbe66f0a5a74701f35ba12f21c70124e0de9ce" Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.282691 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0e9c2c029759db3cb1c9cef40fbe66f0a5a74701f35ba12f21c70124e0de9ce"} err="failed to get container status 
\"d0e9c2c029759db3cb1c9cef40fbe66f0a5a74701f35ba12f21c70124e0de9ce\": rpc error: code = NotFound desc = could not find container \"d0e9c2c029759db3cb1c9cef40fbe66f0a5a74701f35ba12f21c70124e0de9ce\": container with ID starting with d0e9c2c029759db3cb1c9cef40fbe66f0a5a74701f35ba12f21c70124e0de9ce not found: ID does not exist" Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.282737 4622 scope.go:117] "RemoveContainer" containerID="6d81ed2f6c09cb1f96781dbad26f88b5d2e3fb4c63e648efb1d82ff3619e57db" Nov 26 11:46:24 crc kubenswrapper[4622]: E1126 11:46:24.283089 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d81ed2f6c09cb1f96781dbad26f88b5d2e3fb4c63e648efb1d82ff3619e57db\": container with ID starting with 6d81ed2f6c09cb1f96781dbad26f88b5d2e3fb4c63e648efb1d82ff3619e57db not found: ID does not exist" containerID="6d81ed2f6c09cb1f96781dbad26f88b5d2e3fb4c63e648efb1d82ff3619e57db" Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.283121 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d81ed2f6c09cb1f96781dbad26f88b5d2e3fb4c63e648efb1d82ff3619e57db"} err="failed to get container status \"6d81ed2f6c09cb1f96781dbad26f88b5d2e3fb4c63e648efb1d82ff3619e57db\": rpc error: code = NotFound desc = could not find container \"6d81ed2f6c09cb1f96781dbad26f88b5d2e3fb4c63e648efb1d82ff3619e57db\": container with ID starting with 6d81ed2f6c09cb1f96781dbad26f88b5d2e3fb4c63e648efb1d82ff3619e57db not found: ID does not exist" Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.283147 4622 scope.go:117] "RemoveContainer" containerID="ccb2561e56bba147241312374071e5fc1e7ee51a2dd8690e158e417612a92356" Nov 26 11:46:24 crc kubenswrapper[4622]: E1126 11:46:24.283403 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccb2561e56bba147241312374071e5fc1e7ee51a2dd8690e158e417612a92356\": container with ID starting with ccb2561e56bba147241312374071e5fc1e7ee51a2dd8690e158e417612a92356 not found: ID does not exist" containerID="ccb2561e56bba147241312374071e5fc1e7ee51a2dd8690e158e417612a92356" Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.283491 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccb2561e56bba147241312374071e5fc1e7ee51a2dd8690e158e417612a92356"} err="failed to get container status \"ccb2561e56bba147241312374071e5fc1e7ee51a2dd8690e158e417612a92356\": rpc error: code = NotFound desc = could not find container \"ccb2561e56bba147241312374071e5fc1e7ee51a2dd8690e158e417612a92356\": container with ID starting with ccb2561e56bba147241312374071e5fc1e7ee51a2dd8690e158e417612a92356 not found: ID does not exist" Nov 26 11:46:24 crc kubenswrapper[4622]: I1126 11:46:24.715244 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd2af36e-4117-4586-9afa-523b261f4e14" path="/var/lib/kubelet/pods/bd2af36e-4117-4586-9afa-523b261f4e14/volumes" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.221855 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-d7qp5"] Nov 26 11:46:25 crc kubenswrapper[4622]: E1126 11:46:25.222692 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd2af36e-4117-4586-9afa-523b261f4e14" containerName="extract-content" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.222727 4622 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="bd2af36e-4117-4586-9afa-523b261f4e14" containerName="extract-content" Nov 26 11:46:25 crc kubenswrapper[4622]: E1126 11:46:25.222755 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd2af36e-4117-4586-9afa-523b261f4e14" containerName="registry-server" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.222761 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd2af36e-4117-4586-9afa-523b261f4e14" containerName="registry-server" Nov 26 11:46:25 crc kubenswrapper[4622]: E1126 11:46:25.222775 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd2af36e-4117-4586-9afa-523b261f4e14" containerName="extract-utilities" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.222780 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd2af36e-4117-4586-9afa-523b261f4e14" containerName="extract-utilities" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.223035 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd2af36e-4117-4586-9afa-523b261f4e14" containerName="registry-server" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.224450 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.240352 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d7qp5"] Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.257974 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0047dd0c-b2ae-44e4-b606-a7eb82a42975-utilities\") pod \"certified-operators-d7qp5\" (UID: \"0047dd0c-b2ae-44e4-b606-a7eb82a42975\") " pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.258212 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0047dd0c-b2ae-44e4-b606-a7eb82a42975-catalog-content\") pod \"certified-operators-d7qp5\" (UID: \"0047dd0c-b2ae-44e4-b606-a7eb82a42975\") " pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.258329 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc22x\" (UniqueName: \"kubernetes.io/projected/0047dd0c-b2ae-44e4-b606-a7eb82a42975-kube-api-access-nc22x\") pod \"certified-operators-d7qp5\" (UID: \"0047dd0c-b2ae-44e4-b606-a7eb82a42975\") " pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.360065 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0047dd0c-b2ae-44e4-b606-a7eb82a42975-utilities\") pod \"certified-operators-d7qp5\" (UID: \"0047dd0c-b2ae-44e4-b606-a7eb82a42975\") " pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.360295 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0047dd0c-b2ae-44e4-b606-a7eb82a42975-catalog-content\") pod \"certified-operators-d7qp5\" (UID: \"0047dd0c-b2ae-44e4-b606-a7eb82a42975\") " pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.360385 4622 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc22x\" (UniqueName: \"kubernetes.io/projected/0047dd0c-b2ae-44e4-b606-a7eb82a42975-kube-api-access-nc22x\") pod \"certified-operators-d7qp5\" (UID: \"0047dd0c-b2ae-44e4-b606-a7eb82a42975\") " pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.360628 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0047dd0c-b2ae-44e4-b606-a7eb82a42975-utilities\") pod \"certified-operators-d7qp5\" (UID: \"0047dd0c-b2ae-44e4-b606-a7eb82a42975\") " pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.360870 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0047dd0c-b2ae-44e4-b606-a7eb82a42975-catalog-content\") pod \"certified-operators-d7qp5\" (UID: \"0047dd0c-b2ae-44e4-b606-a7eb82a42975\") " pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.378169 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc22x\" (UniqueName: \"kubernetes.io/projected/0047dd0c-b2ae-44e4-b606-a7eb82a42975-kube-api-access-nc22x\") pod \"certified-operators-d7qp5\" (UID: \"0047dd0c-b2ae-44e4-b606-a7eb82a42975\") " pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:25 crc kubenswrapper[4622]: I1126 11:46:25.538540 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:26 crc kubenswrapper[4622]: I1126 11:46:26.012006 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d7qp5"] Nov 26 11:46:26 crc kubenswrapper[4622]: I1126 11:46:26.220610 4622 generic.go:334] "Generic (PLEG): container finished" podID="0047dd0c-b2ae-44e4-b606-a7eb82a42975" containerID="c89f9f1680926180754c326634f98cc9c79d8ca95457207208ea0c828a24af25" exitCode=0 Nov 26 11:46:26 crc kubenswrapper[4622]: I1126 11:46:26.220805 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d7qp5" event={"ID":"0047dd0c-b2ae-44e4-b606-a7eb82a42975","Type":"ContainerDied","Data":"c89f9f1680926180754c326634f98cc9c79d8ca95457207208ea0c828a24af25"} Nov 26 11:46:26 crc kubenswrapper[4622]: I1126 11:46:26.220919 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d7qp5" event={"ID":"0047dd0c-b2ae-44e4-b606-a7eb82a42975","Type":"ContainerStarted","Data":"a488a3f7c34e9df59cfaf431e4d82cbbf6cd4b8038a9ed708ca82df8d0653ec9"} Nov 26 11:46:27 crc kubenswrapper[4622]: I1126 11:46:27.232014 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d7qp5" event={"ID":"0047dd0c-b2ae-44e4-b606-a7eb82a42975","Type":"ContainerStarted","Data":"78fe495db57b22d7cba0a6215c44cf176d0ccfea086896889492b5331a37f2c7"} Nov 26 11:46:28 crc kubenswrapper[4622]: I1126 11:46:28.241286 4622 generic.go:334] "Generic (PLEG): container finished" podID="0047dd0c-b2ae-44e4-b606-a7eb82a42975" containerID="78fe495db57b22d7cba0a6215c44cf176d0ccfea086896889492b5331a37f2c7" exitCode=0 Nov 26 11:46:28 crc kubenswrapper[4622]: I1126 11:46:28.241338 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d7qp5" 
event={"ID":"0047dd0c-b2ae-44e4-b606-a7eb82a42975","Type":"ContainerDied","Data":"78fe495db57b22d7cba0a6215c44cf176d0ccfea086896889492b5331a37f2c7"} Nov 26 11:46:29 crc kubenswrapper[4622]: I1126 11:46:29.251688 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d7qp5" event={"ID":"0047dd0c-b2ae-44e4-b606-a7eb82a42975","Type":"ContainerStarted","Data":"84ea1186877ca5f5132e763e562a796be6b224a3c67e0d95f24a9e0d72be8848"} Nov 26 11:46:29 crc kubenswrapper[4622]: I1126 11:46:29.269137 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-d7qp5" podStartSLOduration=1.7462724189999999 podStartE2EDuration="4.269112165s" podCreationTimestamp="2025-11-26 11:46:25 +0000 UTC" firstStartedPulling="2025-11-26 11:46:26.222639191 +0000 UTC m=+2145.813850713" lastFinishedPulling="2025-11-26 11:46:28.745478938 +0000 UTC m=+2148.336690459" observedRunningTime="2025-11-26 11:46:29.266560202 +0000 UTC m=+2148.857771724" watchObservedRunningTime="2025-11-26 11:46:29.269112165 +0000 UTC m=+2148.860323687" Nov 26 11:46:35 crc kubenswrapper[4622]: I1126 11:46:35.539615 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:35 crc kubenswrapper[4622]: I1126 11:46:35.540378 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:35 crc kubenswrapper[4622]: I1126 11:46:35.577169 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:36 crc kubenswrapper[4622]: I1126 11:46:36.356034 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:36 crc kubenswrapper[4622]: I1126 11:46:36.402456 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d7qp5"] Nov 26 11:46:38 crc kubenswrapper[4622]: I1126 11:46:38.329736 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-d7qp5" podUID="0047dd0c-b2ae-44e4-b606-a7eb82a42975" containerName="registry-server" containerID="cri-o://84ea1186877ca5f5132e763e562a796be6b224a3c67e0d95f24a9e0d72be8848" gracePeriod=2 Nov 26 11:46:38 crc kubenswrapper[4622]: I1126 11:46:38.738353 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d7qp5" Nov 26 11:46:38 crc kubenswrapper[4622]: I1126 11:46:38.866198 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nc22x\" (UniqueName: \"kubernetes.io/projected/0047dd0c-b2ae-44e4-b606-a7eb82a42975-kube-api-access-nc22x\") pod \"0047dd0c-b2ae-44e4-b606-a7eb82a42975\" (UID: \"0047dd0c-b2ae-44e4-b606-a7eb82a42975\") " Nov 26 11:46:38 crc kubenswrapper[4622]: I1126 11:46:38.866286 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0047dd0c-b2ae-44e4-b606-a7eb82a42975-utilities\") pod \"0047dd0c-b2ae-44e4-b606-a7eb82a42975\" (UID: \"0047dd0c-b2ae-44e4-b606-a7eb82a42975\") " Nov 26 11:46:38 crc kubenswrapper[4622]: I1126 11:46:38.866378 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0047dd0c-b2ae-44e4-b606-a7eb82a42975-catalog-content\") pod \"0047dd0c-b2ae-44e4-b606-a7eb82a42975\" (UID: \"0047dd0c-b2ae-44e4-b606-a7eb82a42975\") " Nov 26 11:46:38 crc kubenswrapper[4622]: I1126 11:46:38.867421 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0047dd0c-b2ae-44e4-b606-a7eb82a42975-utilities" (OuterVolumeSpecName: "utilities") pod "0047dd0c-b2ae-44e4-b606-a7eb82a42975" (UID: "0047dd0c-b2ae-44e4-b606-a7eb82a42975"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:46:38 crc kubenswrapper[4622]: I1126 11:46:38.873202 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0047dd0c-b2ae-44e4-b606-a7eb82a42975-kube-api-access-nc22x" (OuterVolumeSpecName: "kube-api-access-nc22x") pod "0047dd0c-b2ae-44e4-b606-a7eb82a42975" (UID: "0047dd0c-b2ae-44e4-b606-a7eb82a42975"). InnerVolumeSpecName "kube-api-access-nc22x". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:46:38 crc kubenswrapper[4622]: I1126 11:46:38.903233 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0047dd0c-b2ae-44e4-b606-a7eb82a42975-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0047dd0c-b2ae-44e4-b606-a7eb82a42975" (UID: "0047dd0c-b2ae-44e4-b606-a7eb82a42975"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:46:38 crc kubenswrapper[4622]: I1126 11:46:38.970198 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nc22x\" (UniqueName: \"kubernetes.io/projected/0047dd0c-b2ae-44e4-b606-a7eb82a42975-kube-api-access-nc22x\") on node \"crc\" DevicePath \"\"" Nov 26 11:46:38 crc kubenswrapper[4622]: I1126 11:46:38.970246 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0047dd0c-b2ae-44e4-b606-a7eb82a42975-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 11:46:38 crc kubenswrapper[4622]: I1126 11:46:38.970258 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0047dd0c-b2ae-44e4-b606-a7eb82a42975-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 11:46:39 crc kubenswrapper[4622]: I1126 11:46:39.341075 4622 generic.go:334] "Generic (PLEG): container finished" podID="0047dd0c-b2ae-44e4-b606-a7eb82a42975" containerID="84ea1186877ca5f5132e763e562a796be6b224a3c67e0d95f24a9e0d72be8848" exitCode=0 Nov 26 11:46:39 crc kubenswrapper[4622]: I1126 11:46:39.341133 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d7qp5" event={"ID":"0047dd0c-b2ae-44e4-b606-a7eb82a42975","Type":"ContainerDied","Data":"84ea1186877ca5f5132e763e562a796be6b224a3c67e0d95f24a9e0d72be8848"} Nov 26 11:46:39 crc kubenswrapper[4622]: I1126 11:46:39.341173 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d7qp5" event={"ID":"0047dd0c-b2ae-44e4-b606-a7eb82a42975","Type":"ContainerDied","Data":"a488a3f7c34e9df59cfaf431e4d82cbbf6cd4b8038a9ed708ca82df8d0653ec9"} Nov 26 11:46:39 crc kubenswrapper[4622]: I1126 11:46:39.341199 4622 scope.go:117] "RemoveContainer" containerID="84ea1186877ca5f5132e763e562a796be6b224a3c67e0d95f24a9e0d72be8848" Nov 26 11:46:39 crc kubenswrapper[4622]: I1126 11:46:39.341203 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d7qp5"
Nov 26 11:46:39 crc kubenswrapper[4622]: I1126 11:46:39.361464 4622 scope.go:117] "RemoveContainer" containerID="78fe495db57b22d7cba0a6215c44cf176d0ccfea086896889492b5331a37f2c7"
Nov 26 11:46:39 crc kubenswrapper[4622]: I1126 11:46:39.372447 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d7qp5"]
Nov 26 11:46:39 crc kubenswrapper[4622]: I1126 11:46:39.378729 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-d7qp5"]
Nov 26 11:46:39 crc kubenswrapper[4622]: I1126 11:46:39.384320 4622 scope.go:117] "RemoveContainer" containerID="c89f9f1680926180754c326634f98cc9c79d8ca95457207208ea0c828a24af25"
Nov 26 11:46:39 crc kubenswrapper[4622]: I1126 11:46:39.415056 4622 scope.go:117] "RemoveContainer" containerID="84ea1186877ca5f5132e763e562a796be6b224a3c67e0d95f24a9e0d72be8848"
Nov 26 11:46:39 crc kubenswrapper[4622]: E1126 11:46:39.415574 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84ea1186877ca5f5132e763e562a796be6b224a3c67e0d95f24a9e0d72be8848\": container with ID starting with 84ea1186877ca5f5132e763e562a796be6b224a3c67e0d95f24a9e0d72be8848 not found: ID does not exist" containerID="84ea1186877ca5f5132e763e562a796be6b224a3c67e0d95f24a9e0d72be8848"
Nov 26 11:46:39 crc kubenswrapper[4622]: I1126 11:46:39.415623 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84ea1186877ca5f5132e763e562a796be6b224a3c67e0d95f24a9e0d72be8848"} err="failed to get container status \"84ea1186877ca5f5132e763e562a796be6b224a3c67e0d95f24a9e0d72be8848\": rpc error: code = NotFound desc = could not find container \"84ea1186877ca5f5132e763e562a796be6b224a3c67e0d95f24a9e0d72be8848\": container with ID starting with 84ea1186877ca5f5132e763e562a796be6b224a3c67e0d95f24a9e0d72be8848 not found: ID does not exist"
Nov 26 11:46:39 crc kubenswrapper[4622]: I1126 11:46:39.415656 4622 scope.go:117] "RemoveContainer" containerID="78fe495db57b22d7cba0a6215c44cf176d0ccfea086896889492b5331a37f2c7"
Nov 26 11:46:39 crc kubenswrapper[4622]: E1126 11:46:39.416064 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78fe495db57b22d7cba0a6215c44cf176d0ccfea086896889492b5331a37f2c7\": container with ID starting with 78fe495db57b22d7cba0a6215c44cf176d0ccfea086896889492b5331a37f2c7 not found: ID does not exist" containerID="78fe495db57b22d7cba0a6215c44cf176d0ccfea086896889492b5331a37f2c7"
Nov 26 11:46:39 crc kubenswrapper[4622]: I1126 11:46:39.416086 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78fe495db57b22d7cba0a6215c44cf176d0ccfea086896889492b5331a37f2c7"} err="failed to get container status \"78fe495db57b22d7cba0a6215c44cf176d0ccfea086896889492b5331a37f2c7\": rpc error: code = NotFound desc = could not find container \"78fe495db57b22d7cba0a6215c44cf176d0ccfea086896889492b5331a37f2c7\": container with ID starting with 78fe495db57b22d7cba0a6215c44cf176d0ccfea086896889492b5331a37f2c7 not found: ID does not exist"
Nov 26 11:46:39 crc kubenswrapper[4622]: I1126 11:46:39.416099 4622 scope.go:117] "RemoveContainer" containerID="c89f9f1680926180754c326634f98cc9c79d8ca95457207208ea0c828a24af25"
Nov 26 11:46:39 crc kubenswrapper[4622]: E1126 11:46:39.416452 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c89f9f1680926180754c326634f98cc9c79d8ca95457207208ea0c828a24af25\": container with ID starting with c89f9f1680926180754c326634f98cc9c79d8ca95457207208ea0c828a24af25 not found: ID does not exist" containerID="c89f9f1680926180754c326634f98cc9c79d8ca95457207208ea0c828a24af25"
Nov 26 11:46:39 crc kubenswrapper[4622]: I1126 11:46:39.416473 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c89f9f1680926180754c326634f98cc9c79d8ca95457207208ea0c828a24af25"} err="failed to get container status \"c89f9f1680926180754c326634f98cc9c79d8ca95457207208ea0c828a24af25\": rpc error: code = NotFound desc = could not find container \"c89f9f1680926180754c326634f98cc9c79d8ca95457207208ea0c828a24af25\": container with ID starting with c89f9f1680926180754c326634f98cc9c79d8ca95457207208ea0c828a24af25 not found: ID does not exist"
Nov 26 11:46:40 crc kubenswrapper[4622]: I1126 11:46:40.715355 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0047dd0c-b2ae-44e4-b606-a7eb82a42975" path="/var/lib/kubelet/pods/0047dd0c-b2ae-44e4-b606-a7eb82a42975/volumes"
Nov 26 11:46:45 crc kubenswrapper[4622]: I1126 11:46:45.199082 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 26 11:46:45 crc kubenswrapper[4622]: I1126 11:46:45.199943 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 26 11:46:45 crc kubenswrapper[4622]: I1126 11:46:45.200010 4622 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k565w"
Nov 26 11:46:45 crc kubenswrapper[4622]: I1126 11:46:45.200998 4622 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5"} pod="openshift-machine-config-operator/machine-config-daemon-k565w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 26 11:46:45 crc kubenswrapper[4622]: I1126 11:46:45.201056 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" containerID="cri-o://437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" gracePeriod=600
Nov 26 11:46:45 crc kubenswrapper[4622]: E1126 11:46:45.322373 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:46:45 crc kubenswrapper[4622]: I1126 11:46:45.404780 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" exitCode=0
Nov 26 11:46:45 crc kubenswrapper[4622]: I1126 11:46:45.404958 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerDied","Data":"437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5"}
Nov 26 11:46:45 crc kubenswrapper[4622]: I1126 11:46:45.405109 4622 scope.go:117] "RemoveContainer" containerID="4f3c3e8c9752c1670dbfbb660eabeb660d71bf97873317cac5ce80997876ad7f"
Nov 26 11:46:45 crc kubenswrapper[4622]: I1126 11:46:45.406759 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5"
Nov 26 11:46:45 crc kubenswrapper[4622]: E1126 11:46:45.407110 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:46:51 crc kubenswrapper[4622]: I1126 11:46:51.893004 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lk8sk"]
Nov 26 11:46:51 crc kubenswrapper[4622]: E1126 11:46:51.894025 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0047dd0c-b2ae-44e4-b606-a7eb82a42975" containerName="extract-utilities"
Nov 26 11:46:51 crc kubenswrapper[4622]: I1126 11:46:51.894042 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="0047dd0c-b2ae-44e4-b606-a7eb82a42975" containerName="extract-utilities"
Nov 26 11:46:51 crc kubenswrapper[4622]: E1126 11:46:51.894056 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0047dd0c-b2ae-44e4-b606-a7eb82a42975" containerName="extract-content"
Nov 26 11:46:51 crc kubenswrapper[4622]: I1126 11:46:51.894063 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="0047dd0c-b2ae-44e4-b606-a7eb82a42975" containerName="extract-content"
Nov 26 11:46:51 crc kubenswrapper[4622]: E1126 11:46:51.894085 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0047dd0c-b2ae-44e4-b606-a7eb82a42975" containerName="registry-server"
Nov 26 11:46:51 crc kubenswrapper[4622]: I1126 11:46:51.894092 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="0047dd0c-b2ae-44e4-b606-a7eb82a42975" containerName="registry-server"
Nov 26 11:46:51 crc kubenswrapper[4622]: I1126 11:46:51.894279 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="0047dd0c-b2ae-44e4-b606-a7eb82a42975" containerName="registry-server"
Nov 26 11:46:51 crc kubenswrapper[4622]: I1126 11:46:51.895671 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:46:51 crc kubenswrapper[4622]: I1126 11:46:51.902834 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lk8sk"]
Nov 26 11:46:51 crc kubenswrapper[4622]: I1126 11:46:51.975709 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44ksc\" (UniqueName: \"kubernetes.io/projected/6ee9fc9e-e690-48f1-923d-c429562f0866-kube-api-access-44ksc\") pod \"community-operators-lk8sk\" (UID: \"6ee9fc9e-e690-48f1-923d-c429562f0866\") " pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:46:51 crc kubenswrapper[4622]: I1126 11:46:51.976038 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ee9fc9e-e690-48f1-923d-c429562f0866-utilities\") pod \"community-operators-lk8sk\" (UID: \"6ee9fc9e-e690-48f1-923d-c429562f0866\") " pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:46:51 crc kubenswrapper[4622]: I1126 11:46:51.976123 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ee9fc9e-e690-48f1-923d-c429562f0866-catalog-content\") pod \"community-operators-lk8sk\" (UID: \"6ee9fc9e-e690-48f1-923d-c429562f0866\") " pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:46:52 crc kubenswrapper[4622]: I1126 11:46:52.077744 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ee9fc9e-e690-48f1-923d-c429562f0866-catalog-content\") pod \"community-operators-lk8sk\" (UID: \"6ee9fc9e-e690-48f1-923d-c429562f0866\") " pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:46:52 crc kubenswrapper[4622]: I1126 11:46:52.077994 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44ksc\" (UniqueName: \"kubernetes.io/projected/6ee9fc9e-e690-48f1-923d-c429562f0866-kube-api-access-44ksc\") pod \"community-operators-lk8sk\" (UID: \"6ee9fc9e-e690-48f1-923d-c429562f0866\") " pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:46:52 crc kubenswrapper[4622]: I1126 11:46:52.078045 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ee9fc9e-e690-48f1-923d-c429562f0866-utilities\") pod \"community-operators-lk8sk\" (UID: \"6ee9fc9e-e690-48f1-923d-c429562f0866\") " pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:46:52 crc kubenswrapper[4622]: I1126 11:46:52.078248 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ee9fc9e-e690-48f1-923d-c429562f0866-catalog-content\") pod \"community-operators-lk8sk\" (UID: \"6ee9fc9e-e690-48f1-923d-c429562f0866\") " pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:46:52 crc kubenswrapper[4622]: I1126 11:46:52.078428 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ee9fc9e-e690-48f1-923d-c429562f0866-utilities\") pod \"community-operators-lk8sk\" (UID: \"6ee9fc9e-e690-48f1-923d-c429562f0866\") " pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:46:52 crc kubenswrapper[4622]: I1126 11:46:52.094968 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44ksc\" (UniqueName: \"kubernetes.io/projected/6ee9fc9e-e690-48f1-923d-c429562f0866-kube-api-access-44ksc\") pod \"community-operators-lk8sk\" (UID: \"6ee9fc9e-e690-48f1-923d-c429562f0866\") " pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:46:52 crc kubenswrapper[4622]: I1126 11:46:52.218349 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:46:52 crc kubenswrapper[4622]: I1126 11:46:52.676124 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lk8sk"]
Nov 26 11:46:53 crc kubenswrapper[4622]: I1126 11:46:53.477287 4622 generic.go:334] "Generic (PLEG): container finished" podID="6ee9fc9e-e690-48f1-923d-c429562f0866" containerID="e20f2e8376fbfb4442d61089b6935641824f565c9a518d0edcad8d1de40a9b34" exitCode=0
Nov 26 11:46:53 crc kubenswrapper[4622]: I1126 11:46:53.477349 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lk8sk" event={"ID":"6ee9fc9e-e690-48f1-923d-c429562f0866","Type":"ContainerDied","Data":"e20f2e8376fbfb4442d61089b6935641824f565c9a518d0edcad8d1de40a9b34"}
Nov 26 11:46:53 crc kubenswrapper[4622]: I1126 11:46:53.477777 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lk8sk" event={"ID":"6ee9fc9e-e690-48f1-923d-c429562f0866","Type":"ContainerStarted","Data":"7dfc731767cada73d46dd967a56976769e14073de626a2ca14ea9fd3e625a440"}
Nov 26 11:46:54 crc kubenswrapper[4622]: I1126 11:46:54.490877 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lk8sk" event={"ID":"6ee9fc9e-e690-48f1-923d-c429562f0866","Type":"ContainerStarted","Data":"89e383a79179b06461c91dca3d6a7b3e00fa828c190c9e5bfe83cbbff3de683a"}
Nov 26 11:46:55 crc kubenswrapper[4622]: I1126 11:46:55.501939 4622 generic.go:334] "Generic (PLEG): container finished" podID="6ee9fc9e-e690-48f1-923d-c429562f0866" containerID="89e383a79179b06461c91dca3d6a7b3e00fa828c190c9e5bfe83cbbff3de683a" exitCode=0
Nov 26 11:46:55 crc kubenswrapper[4622]: I1126 11:46:55.502010 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lk8sk" event={"ID":"6ee9fc9e-e690-48f1-923d-c429562f0866","Type":"ContainerDied","Data":"89e383a79179b06461c91dca3d6a7b3e00fa828c190c9e5bfe83cbbff3de683a"}
Nov 26 11:46:56 crc kubenswrapper[4622]: I1126 11:46:56.513746 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lk8sk" event={"ID":"6ee9fc9e-e690-48f1-923d-c429562f0866","Type":"ContainerStarted","Data":"2bf04f4c5480ec6a57e6ed62011e0d5bcf0c854d7ddbdae00808b230f5ef157a"}
Nov 26 11:46:56 crc kubenswrapper[4622]: I1126 11:46:56.538708 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lk8sk" podStartSLOduration=2.891848468 podStartE2EDuration="5.538694534s" podCreationTimestamp="2025-11-26 11:46:51 +0000 UTC" firstStartedPulling="2025-11-26 11:46:53.480810552 +0000 UTC m=+2173.072022074" lastFinishedPulling="2025-11-26 11:46:56.127656617 +0000 UTC m=+2175.718868140" observedRunningTime="2025-11-26 11:46:56.533646164 +0000 UTC m=+2176.124857687" watchObservedRunningTime="2025-11-26 11:46:56.538694534 +0000 UTC m=+2176.129906057"
Nov 26 11:46:57 crc kubenswrapper[4622]: I1126 11:46:57.706253 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5"
Nov 26 11:46:57 crc kubenswrapper[4622]: E1126 11:46:57.706862 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:47:02 crc kubenswrapper[4622]: I1126 11:47:02.218557 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:47:02 crc kubenswrapper[4622]: I1126 11:47:02.219122 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:47:02 crc kubenswrapper[4622]: I1126 11:47:02.262294 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:47:02 crc kubenswrapper[4622]: I1126 11:47:02.583754 4622 generic.go:334] "Generic (PLEG): container finished" podID="2b278dc2-9da9-470a-93b5-4918011b54a0" containerID="48991a3dadde52d54dfbc92b6d474a0247d541c4a4921dceaf6e180502fad01f" exitCode=0
Nov 26 11:47:02 crc kubenswrapper[4622]: I1126 11:47:02.583835 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" event={"ID":"2b278dc2-9da9-470a-93b5-4918011b54a0","Type":"ContainerDied","Data":"48991a3dadde52d54dfbc92b6d474a0247d541c4a4921dceaf6e180502fad01f"}
Nov 26 11:47:02 crc kubenswrapper[4622]: I1126 11:47:02.632726 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:47:02 crc kubenswrapper[4622]: I1126 11:47:02.675195 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lk8sk"]
Nov 26 11:47:03 crc kubenswrapper[4622]: I1126 11:47:03.955126 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.059143 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4gvd\" (UniqueName: \"kubernetes.io/projected/2b278dc2-9da9-470a-93b5-4918011b54a0-kube-api-access-j4gvd\") pod \"2b278dc2-9da9-470a-93b5-4918011b54a0\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") "
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.059236 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-neutron-ovn-metadata-agent-neutron-config-0\") pod \"2b278dc2-9da9-470a-93b5-4918011b54a0\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") "
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.059313 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-inventory\") pod \"2b278dc2-9da9-470a-93b5-4918011b54a0\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") "
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.059337 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-nova-metadata-neutron-config-0\") pod \"2b278dc2-9da9-470a-93b5-4918011b54a0\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") "
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.059480 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-ssh-key\") pod \"2b278dc2-9da9-470a-93b5-4918011b54a0\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") "
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.059734 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-ceph\") pod \"2b278dc2-9da9-470a-93b5-4918011b54a0\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") "
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.059829 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-neutron-metadata-combined-ca-bundle\") pod \"2b278dc2-9da9-470a-93b5-4918011b54a0\" (UID: \"2b278dc2-9da9-470a-93b5-4918011b54a0\") "
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.067048 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-ceph" (OuterVolumeSpecName: "ceph") pod "2b278dc2-9da9-470a-93b5-4918011b54a0" (UID: "2b278dc2-9da9-470a-93b5-4918011b54a0"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.068085 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b278dc2-9da9-470a-93b5-4918011b54a0-kube-api-access-j4gvd" (OuterVolumeSpecName: "kube-api-access-j4gvd") pod "2b278dc2-9da9-470a-93b5-4918011b54a0" (UID: "2b278dc2-9da9-470a-93b5-4918011b54a0"). InnerVolumeSpecName "kube-api-access-j4gvd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.068662 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "2b278dc2-9da9-470a-93b5-4918011b54a0" (UID: "2b278dc2-9da9-470a-93b5-4918011b54a0"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.087245 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2b278dc2-9da9-470a-93b5-4918011b54a0" (UID: "2b278dc2-9da9-470a-93b5-4918011b54a0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.087788 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "2b278dc2-9da9-470a-93b5-4918011b54a0" (UID: "2b278dc2-9da9-470a-93b5-4918011b54a0"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.088309 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-inventory" (OuterVolumeSpecName: "inventory") pod "2b278dc2-9da9-470a-93b5-4918011b54a0" (UID: "2b278dc2-9da9-470a-93b5-4918011b54a0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.089952 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "2b278dc2-9da9-470a-93b5-4918011b54a0" (UID: "2b278dc2-9da9-470a-93b5-4918011b54a0"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.165850 4622 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.165884 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4gvd\" (UniqueName: \"kubernetes.io/projected/2b278dc2-9da9-470a-93b5-4918011b54a0-kube-api-access-j4gvd\") on node \"crc\" DevicePath \"\""
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.165896 4622 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\""
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.165911 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-inventory\") on node \"crc\" DevicePath \"\""
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.165919 4622 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\""
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.165931 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.165939 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2b278dc2-9da9-470a-93b5-4918011b54a0-ceph\") on node \"crc\" DevicePath \"\""
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.602913 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8" event={"ID":"2b278dc2-9da9-470a-93b5-4918011b54a0","Type":"ContainerDied","Data":"fb620dc00ada1d86fbaadffb8c064d5ec8a7561256798da39612008b07221429"}
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.602977 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb620dc00ada1d86fbaadffb8c064d5ec8a7561256798da39612008b07221429"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.602932 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.603042 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lk8sk" podUID="6ee9fc9e-e690-48f1-923d-c429562f0866" containerName="registry-server" containerID="cri-o://2bf04f4c5480ec6a57e6ed62011e0d5bcf0c854d7ddbdae00808b230f5ef157a" gracePeriod=2
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.675248 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"]
Nov 26 11:47:04 crc kubenswrapper[4622]: E1126 11:47:04.675667 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b278dc2-9da9-470a-93b5-4918011b54a0" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.675688 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b278dc2-9da9-470a-93b5-4918011b54a0" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.675905 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b278dc2-9da9-470a-93b5-4918011b54a0" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.676548 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.678903 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.679363 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.679612 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.679776 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.679921 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.680138 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.687690 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"]
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.778539 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.778679 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.778795 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.778895 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.779526 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.779822 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9p2p\" (UniqueName: \"kubernetes.io/projected/61960c03-d6c3-417c-8445-a485f622d6b1-kube-api-access-r9p2p\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.881838 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.881890 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.881977 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.882023 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.882183 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.882223 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9p2p\" (UniqueName: \"kubernetes.io/projected/61960c03-d6c3-417c-8445-a485f622d6b1-kube-api-access-r9p2p\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.888516 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.888594 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.888795 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.889047 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.889762 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.897724 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9p2p\" (UniqueName: \"kubernetes.io/projected/61960c03-d6c3-417c-8445-a485f622d6b1-kube-api-access-r9p2p\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:04 crc kubenswrapper[4622]: I1126 11:47:04.991892 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.473560 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.485705 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5"]
Nov 26 11:47:05 crc kubenswrapper[4622]: W1126 11:47:05.491155 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61960c03_d6c3_417c_8445_a485f622d6b1.slice/crio-5074466cce3cd0ae3490349e0e4fa631a564563c8027cf537a784dcca57563ca WatchSource:0}: Error finding container 5074466cce3cd0ae3490349e0e4fa631a564563c8027cf537a784dcca57563ca: Status 404 returned error can't find the container with id 5074466cce3cd0ae3490349e0e4fa631a564563c8027cf537a784dcca57563ca
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.599127 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ee9fc9e-e690-48f1-923d-c429562f0866-utilities\") pod \"6ee9fc9e-e690-48f1-923d-c429562f0866\" (UID: \"6ee9fc9e-e690-48f1-923d-c429562f0866\") "
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.599210 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44ksc\" (UniqueName: \"kubernetes.io/projected/6ee9fc9e-e690-48f1-923d-c429562f0866-kube-api-access-44ksc\") pod \"6ee9fc9e-e690-48f1-923d-c429562f0866\" (UID: \"6ee9fc9e-e690-48f1-923d-c429562f0866\") "
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.599263 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ee9fc9e-e690-48f1-923d-c429562f0866-catalog-content\") pod \"6ee9fc9e-e690-48f1-923d-c429562f0866\" (UID: \"6ee9fc9e-e690-48f1-923d-c429562f0866\") "
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.600214 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ee9fc9e-e690-48f1-923d-c429562f0866-utilities" (OuterVolumeSpecName: "utilities") pod "6ee9fc9e-e690-48f1-923d-c429562f0866" (UID: "6ee9fc9e-e690-48f1-923d-c429562f0866"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.605816 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ee9fc9e-e690-48f1-923d-c429562f0866-kube-api-access-44ksc" (OuterVolumeSpecName: "kube-api-access-44ksc") pod "6ee9fc9e-e690-48f1-923d-c429562f0866" (UID: "6ee9fc9e-e690-48f1-923d-c429562f0866"). InnerVolumeSpecName "kube-api-access-44ksc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.616068 4622 generic.go:334] "Generic (PLEG): container finished" podID="6ee9fc9e-e690-48f1-923d-c429562f0866" containerID="2bf04f4c5480ec6a57e6ed62011e0d5bcf0c854d7ddbdae00808b230f5ef157a" exitCode=0
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.616169 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lk8sk" event={"ID":"6ee9fc9e-e690-48f1-923d-c429562f0866","Type":"ContainerDied","Data":"2bf04f4c5480ec6a57e6ed62011e0d5bcf0c854d7ddbdae00808b230f5ef157a"}
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.616214 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lk8sk" event={"ID":"6ee9fc9e-e690-48f1-923d-c429562f0866","Type":"ContainerDied","Data":"7dfc731767cada73d46dd967a56976769e14073de626a2ca14ea9fd3e625a440"}
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.616240 4622 scope.go:117] "RemoveContainer" containerID="2bf04f4c5480ec6a57e6ed62011e0d5bcf0c854d7ddbdae00808b230f5ef157a"
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.616550 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lk8sk"
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.618008 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5" event={"ID":"61960c03-d6c3-417c-8445-a485f622d6b1","Type":"ContainerStarted","Data":"5074466cce3cd0ae3490349e0e4fa631a564563c8027cf537a784dcca57563ca"}
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.639809 4622 scope.go:117] "RemoveContainer" containerID="89e383a79179b06461c91dca3d6a7b3e00fa828c190c9e5bfe83cbbff3de683a"
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.646456 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ee9fc9e-e690-48f1-923d-c429562f0866-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6ee9fc9e-e690-48f1-923d-c429562f0866" (UID: "6ee9fc9e-e690-48f1-923d-c429562f0866"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.659213 4622 scope.go:117] "RemoveContainer" containerID="e20f2e8376fbfb4442d61089b6935641824f565c9a518d0edcad8d1de40a9b34"
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.674750 4622 scope.go:117] "RemoveContainer" containerID="2bf04f4c5480ec6a57e6ed62011e0d5bcf0c854d7ddbdae00808b230f5ef157a"
Nov 26 11:47:05 crc kubenswrapper[4622]: E1126 11:47:05.675164 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bf04f4c5480ec6a57e6ed62011e0d5bcf0c854d7ddbdae00808b230f5ef157a\": container with ID starting with 2bf04f4c5480ec6a57e6ed62011e0d5bcf0c854d7ddbdae00808b230f5ef157a not found: ID does not exist" containerID="2bf04f4c5480ec6a57e6ed62011e0d5bcf0c854d7ddbdae00808b230f5ef157a"
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.675202 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bf04f4c5480ec6a57e6ed62011e0d5bcf0c854d7ddbdae00808b230f5ef157a"} err="failed to get container status \"2bf04f4c5480ec6a57e6ed62011e0d5bcf0c854d7ddbdae00808b230f5ef157a\": rpc error: code = NotFound desc = could not find container \"2bf04f4c5480ec6a57e6ed62011e0d5bcf0c854d7ddbdae00808b230f5ef157a\": container with ID starting with 2bf04f4c5480ec6a57e6ed62011e0d5bcf0c854d7ddbdae00808b230f5ef157a not found: ID does not exist"
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.675232 4622 scope.go:117] "RemoveContainer" containerID="89e383a79179b06461c91dca3d6a7b3e00fa828c190c9e5bfe83cbbff3de683a"
Nov 26 11:47:05 crc kubenswrapper[4622]: E1126 11:47:05.675601 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89e383a79179b06461c91dca3d6a7b3e00fa828c190c9e5bfe83cbbff3de683a\": container with ID starting with 89e383a79179b06461c91dca3d6a7b3e00fa828c190c9e5bfe83cbbff3de683a not found: ID does not exist" containerID="89e383a79179b06461c91dca3d6a7b3e00fa828c190c9e5bfe83cbbff3de683a"
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.675643 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89e383a79179b06461c91dca3d6a7b3e00fa828c190c9e5bfe83cbbff3de683a"} err="failed to get container status \"89e383a79179b06461c91dca3d6a7b3e00fa828c190c9e5bfe83cbbff3de683a\": rpc error: code = NotFound desc = could not find container \"89e383a79179b06461c91dca3d6a7b3e00fa828c190c9e5bfe83cbbff3de683a\": container with ID starting with 89e383a79179b06461c91dca3d6a7b3e00fa828c190c9e5bfe83cbbff3de683a not found: ID does not exist"
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.675670 4622 scope.go:117] "RemoveContainer" containerID="e20f2e8376fbfb4442d61089b6935641824f565c9a518d0edcad8d1de40a9b34"
Nov 26 11:47:05 crc kubenswrapper[4622]: E1126 11:47:05.676024 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e20f2e8376fbfb4442d61089b6935641824f565c9a518d0edcad8d1de40a9b34\": container with ID starting with e20f2e8376fbfb4442d61089b6935641824f565c9a518d0edcad8d1de40a9b34 not found: ID does not exist" containerID="e20f2e8376fbfb4442d61089b6935641824f565c9a518d0edcad8d1de40a9b34"
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.676059 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e20f2e8376fbfb4442d61089b6935641824f565c9a518d0edcad8d1de40a9b34"} err="failed to get container status \"e20f2e8376fbfb4442d61089b6935641824f565c9a518d0edcad8d1de40a9b34\": rpc error: code = NotFound desc = could not find container \"e20f2e8376fbfb4442d61089b6935641824f565c9a518d0edcad8d1de40a9b34\": container with ID starting with e20f2e8376fbfb4442d61089b6935641824f565c9a518d0edcad8d1de40a9b34 not found: ID does not exist"
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.701963 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ee9fc9e-e690-48f1-923d-c429562f0866-utilities\") on node \"crc\" DevicePath \"\""
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.701990 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44ksc\" (UniqueName: \"kubernetes.io/projected/6ee9fc9e-e690-48f1-923d-c429562f0866-kube-api-access-44ksc\") on node \"crc\" DevicePath \"\""
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.702001 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ee9fc9e-e690-48f1-923d-c429562f0866-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.944312 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lk8sk"]
Nov 26 11:47:05 crc kubenswrapper[4622]: I1126 11:47:05.950454 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lk8sk"]
Nov 26 11:47:06 crc kubenswrapper[4622]: I1126 11:47:06.629172 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5" event={"ID":"61960c03-d6c3-417c-8445-a485f622d6b1","Type":"ContainerStarted","Data":"11843b92d298906be65383abee9cad785f4673a0514281aefd6ea4d9d1891dd3"}
Nov 26 11:47:06 crc kubenswrapper[4622]: I1126 11:47:06.650153 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5" podStartSLOduration=1.8977699989999999 podStartE2EDuration="2.650136586s" podCreationTimestamp="2025-11-26 11:47:04 +0000 UTC" firstStartedPulling="2025-11-26 11:47:05.49360329 +0000 UTC m=+2185.084814811" lastFinishedPulling="2025-11-26 11:47:06.245969876 +0000 UTC m=+2185.837181398" observedRunningTime="2025-11-26 11:47:06.646203299 +0000 UTC m=+2186.237414821" watchObservedRunningTime="2025-11-26 11:47:06.650136586 +0000 UTC m=+2186.241348108"
Nov 26 11:47:06 crc kubenswrapper[4622]: I1126 11:47:06.715658 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ee9fc9e-e690-48f1-923d-c429562f0866" path="/var/lib/kubelet/pods/6ee9fc9e-e690-48f1-923d-c429562f0866/volumes"
Nov 26 11:47:08 crc kubenswrapper[4622]: I1126 11:47:08.705844 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5"
Nov 26 11:47:08 crc kubenswrapper[4622]: E1126 11:47:08.707470 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:47:21 crc kubenswrapper[4622]: I1126 11:47:21.705988 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5"
Nov 26 11:47:21 crc kubenswrapper[4622]: E1126 11:47:21.706891 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:47:35 crc kubenswrapper[4622]: I1126 11:47:35.706138 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5"
Nov 26 11:47:35 crc kubenswrapper[4622]: E1126 11:47:35.707308 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:47:50 crc kubenswrapper[4622]: I1126 11:47:50.711916 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5"
Nov 26 11:47:50 crc kubenswrapper[4622]: E1126 11:47:50.712958 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:48:04 crc kubenswrapper[4622]: I1126 11:48:04.706576 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5"
Nov 26 11:48:04 crc kubenswrapper[4622]: E1126 11:48:04.707397 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:48:17 crc kubenswrapper[4622]: I1126 11:48:17.706641 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5"
Nov 26 11:48:17 crc kubenswrapper[4622]: E1126 11:48:17.707606 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:48:30 crc kubenswrapper[4622]: I1126 11:48:30.712461 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5"
Nov 26 11:48:30 crc kubenswrapper[4622]: E1126 11:48:30.713619 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:48:45 crc kubenswrapper[4622]: I1126 11:48:45.706361 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5"
Nov 26 11:48:45 crc kubenswrapper[4622]: E1126 11:48:45.707351 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.607101 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7qsvs"]
Nov 26 11:48:47 crc kubenswrapper[4622]: E1126 11:48:47.607777 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ee9fc9e-e690-48f1-923d-c429562f0866" containerName="registry-server"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.607794 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ee9fc9e-e690-48f1-923d-c429562f0866" containerName="registry-server"
Nov 26 11:48:47 crc kubenswrapper[4622]: E1126 11:48:47.607822 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ee9fc9e-e690-48f1-923d-c429562f0866" containerName="extract-content"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.607830 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ee9fc9e-e690-48f1-923d-c429562f0866" containerName="extract-content"
Nov 26 11:48:47 crc kubenswrapper[4622]: E1126 11:48:47.607857 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ee9fc9e-e690-48f1-923d-c429562f0866" containerName="extract-utilities"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.607863 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ee9fc9e-e690-48f1-923d-c429562f0866" containerName="extract-utilities"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.608107 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ee9fc9e-e690-48f1-923d-c429562f0866" containerName="registry-server"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.609407 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.616647 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7qsvs"]
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.703129 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5af4216a-f906-41bb-be3a-9585f4ef4e66-utilities\") pod \"redhat-operators-7qsvs\" (UID: \"5af4216a-f906-41bb-be3a-9585f4ef4e66\") " pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.703540 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5af4216a-f906-41bb-be3a-9585f4ef4e66-catalog-content\") pod \"redhat-operators-7qsvs\" (UID: \"5af4216a-f906-41bb-be3a-9585f4ef4e66\") " pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.703814 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7mv7\" (UniqueName: \"kubernetes.io/projected/5af4216a-f906-41bb-be3a-9585f4ef4e66-kube-api-access-t7mv7\") pod \"redhat-operators-7qsvs\" (UID: \"5af4216a-f906-41bb-be3a-9585f4ef4e66\") " pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.805704 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7mv7\" (UniqueName: \"kubernetes.io/projected/5af4216a-f906-41bb-be3a-9585f4ef4e66-kube-api-access-t7mv7\") pod \"redhat-operators-7qsvs\" (UID: \"5af4216a-f906-41bb-be3a-9585f4ef4e66\") " pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.805764 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5af4216a-f906-41bb-be3a-9585f4ef4e66-utilities\") pod \"redhat-operators-7qsvs\" (UID: \"5af4216a-f906-41bb-be3a-9585f4ef4e66\") " pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.805870 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5af4216a-f906-41bb-be3a-9585f4ef4e66-catalog-content\") pod \"redhat-operators-7qsvs\" (UID: \"5af4216a-f906-41bb-be3a-9585f4ef4e66\") " pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.806542 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5af4216a-f906-41bb-be3a-9585f4ef4e66-utilities\") pod \"redhat-operators-7qsvs\" (UID: \"5af4216a-f906-41bb-be3a-9585f4ef4e66\") " pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.806735 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5af4216a-f906-41bb-be3a-9585f4ef4e66-catalog-content\") pod \"redhat-operators-7qsvs\" (UID: \"5af4216a-f906-41bb-be3a-9585f4ef4e66\") " pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.823538 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7mv7\" (UniqueName: \"kubernetes.io/projected/5af4216a-f906-41bb-be3a-9585f4ef4e66-kube-api-access-t7mv7\") pod \"redhat-operators-7qsvs\" (UID: \"5af4216a-f906-41bb-be3a-9585f4ef4e66\") " pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:48:47 crc kubenswrapper[4622]: I1126 11:48:47.925175 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:48:48 crc kubenswrapper[4622]: I1126 11:48:48.340242 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7qsvs"]
Nov 26 11:48:48 crc kubenswrapper[4622]: I1126 11:48:48.571424 4622 generic.go:334] "Generic (PLEG): container finished" podID="5af4216a-f906-41bb-be3a-9585f4ef4e66" containerID="fa4f411153a254611c2536b29f03dd69c25d4b626614a40b76990891c62c6387" exitCode=0
Nov 26 11:48:48 crc kubenswrapper[4622]: I1126 11:48:48.571534 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7qsvs" event={"ID":"5af4216a-f906-41bb-be3a-9585f4ef4e66","Type":"ContainerDied","Data":"fa4f411153a254611c2536b29f03dd69c25d4b626614a40b76990891c62c6387"}
Nov 26 11:48:48 crc kubenswrapper[4622]: I1126 11:48:48.571947 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7qsvs" event={"ID":"5af4216a-f906-41bb-be3a-9585f4ef4e66","Type":"ContainerStarted","Data":"da6ba3dd4c9f38a60dc9abe94793caf56cc7f30c9fbb903a351334f6535e377e"}
Nov 26 11:48:50 crc kubenswrapper[4622]: I1126 11:48:50.593315 4622 generic.go:334] "Generic (PLEG): container finished" podID="5af4216a-f906-41bb-be3a-9585f4ef4e66" containerID="017ee552db5f1753e1cd0d20dde2b82424fd2337e5c767ea53df5ca10335a82b" exitCode=0
Nov 26 11:48:50 crc kubenswrapper[4622]: I1126 11:48:50.593435 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7qsvs" event={"ID":"5af4216a-f906-41bb-be3a-9585f4ef4e66","Type":"ContainerDied","Data":"017ee552db5f1753e1cd0d20dde2b82424fd2337e5c767ea53df5ca10335a82b"}
Nov 26 11:48:51 crc kubenswrapper[4622]: I1126 11:48:51.607402 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7qsvs" event={"ID":"5af4216a-f906-41bb-be3a-9585f4ef4e66","Type":"ContainerStarted","Data":"ce6347ca0d9a36501ea0e59d9f301d0336f2860eafb71e273fb128365e61bd16"}
Nov 26 11:48:51 crc kubenswrapper[4622]: I1126 11:48:51.632414 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7qsvs" podStartSLOduration=1.921075569 podStartE2EDuration="4.632391648s" podCreationTimestamp="2025-11-26 11:48:47 +0000 UTC" firstStartedPulling="2025-11-26 11:48:48.572563288 +0000 UTC m=+2288.163774810" lastFinishedPulling="2025-11-26 11:48:51.283879367 +0000 UTC m=+2290.875090889" observedRunningTime="2025-11-26 11:48:51.62667044 +0000 UTC m=+2291.217881961" watchObservedRunningTime="2025-11-26 11:48:51.632391648 +0000 UTC m=+2291.223603170"
Nov 26 11:48:56 crc kubenswrapper[4622]: I1126 11:48:56.707134 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5"
Nov 26 11:48:56 crc kubenswrapper[4622]: E1126 11:48:56.708128 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:48:57 crc kubenswrapper[4622]: I1126 11:48:57.925339 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:48:57 crc kubenswrapper[4622]: I1126 11:48:57.925647 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:48:57 crc kubenswrapper[4622]: I1126 11:48:57.966060 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:48:58 crc kubenswrapper[4622]: I1126 11:48:58.729983 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:48:58 crc kubenswrapper[4622]: I1126 11:48:58.772698 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7qsvs"]
Nov 26 11:49:00 crc kubenswrapper[4622]: I1126 11:49:00.704002 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7qsvs" podUID="5af4216a-f906-41bb-be3a-9585f4ef4e66" containerName="registry-server" containerID="cri-o://ce6347ca0d9a36501ea0e59d9f301d0336f2860eafb71e273fb128365e61bd16" gracePeriod=2
Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.069266 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7qsvs"
Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.107107 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7mv7\" (UniqueName: \"kubernetes.io/projected/5af4216a-f906-41bb-be3a-9585f4ef4e66-kube-api-access-t7mv7\") pod \"5af4216a-f906-41bb-be3a-9585f4ef4e66\" (UID: \"5af4216a-f906-41bb-be3a-9585f4ef4e66\") "
Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.107279 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5af4216a-f906-41bb-be3a-9585f4ef4e66-catalog-content\") pod \"5af4216a-f906-41bb-be3a-9585f4ef4e66\" (UID: \"5af4216a-f906-41bb-be3a-9585f4ef4e66\") "
Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.107368 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5af4216a-f906-41bb-be3a-9585f4ef4e66-utilities\") pod \"5af4216a-f906-41bb-be3a-9585f4ef4e66\" (UID: \"5af4216a-f906-41bb-be3a-9585f4ef4e66\") "
Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.108331 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5af4216a-f906-41bb-be3a-9585f4ef4e66-utilities" (OuterVolumeSpecName: "utilities") pod "5af4216a-f906-41bb-be3a-9585f4ef4e66" (UID: "5af4216a-f906-41bb-be3a-9585f4ef4e66"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.113996 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5af4216a-f906-41bb-be3a-9585f4ef4e66-kube-api-access-t7mv7" (OuterVolumeSpecName: "kube-api-access-t7mv7") pod "5af4216a-f906-41bb-be3a-9585f4ef4e66" (UID: "5af4216a-f906-41bb-be3a-9585f4ef4e66"). InnerVolumeSpecName "kube-api-access-t7mv7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.178071 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5af4216a-f906-41bb-be3a-9585f4ef4e66-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5af4216a-f906-41bb-be3a-9585f4ef4e66" (UID: "5af4216a-f906-41bb-be3a-9585f4ef4e66"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.210309 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7mv7\" (UniqueName: \"kubernetes.io/projected/5af4216a-f906-41bb-be3a-9585f4ef4e66-kube-api-access-t7mv7\") on node \"crc\" DevicePath \"\""
Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.210336 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5af4216a-f906-41bb-be3a-9585f4ef4e66-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.210347 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5af4216a-f906-41bb-be3a-9585f4ef4e66-utilities\") on node \"crc\" DevicePath \"\""
Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.714372 4622 generic.go:334] "Generic (PLEG): container finished" podID="5af4216a-f906-41bb-be3a-9585f4ef4e66" containerID="ce6347ca0d9a36501ea0e59d9f301d0336f2860eafb71e273fb128365e61bd16" exitCode=0
Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.714421 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7qsvs" event={"ID":"5af4216a-f906-41bb-be3a-9585f4ef4e66","Type":"ContainerDied","Data":"ce6347ca0d9a36501ea0e59d9f301d0336f2860eafb71e273fb128365e61bd16"}
Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.714441 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7qsvs" Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.714463 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7qsvs" event={"ID":"5af4216a-f906-41bb-be3a-9585f4ef4e66","Type":"ContainerDied","Data":"da6ba3dd4c9f38a60dc9abe94793caf56cc7f30c9fbb903a351334f6535e377e"} Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.714482 4622 scope.go:117] "RemoveContainer" containerID="ce6347ca0d9a36501ea0e59d9f301d0336f2860eafb71e273fb128365e61bd16" Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.736774 4622 scope.go:117] "RemoveContainer" containerID="017ee552db5f1753e1cd0d20dde2b82424fd2337e5c767ea53df5ca10335a82b" Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.740876 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7qsvs"] Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.747843 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7qsvs"] Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.776700 4622 scope.go:117] "RemoveContainer" containerID="fa4f411153a254611c2536b29f03dd69c25d4b626614a40b76990891c62c6387" Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.792461 4622 scope.go:117] "RemoveContainer" containerID="ce6347ca0d9a36501ea0e59d9f301d0336f2860eafb71e273fb128365e61bd16" Nov 26 11:49:01 crc kubenswrapper[4622]: E1126 11:49:01.792785 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce6347ca0d9a36501ea0e59d9f301d0336f2860eafb71e273fb128365e61bd16\": container with ID starting with ce6347ca0d9a36501ea0e59d9f301d0336f2860eafb71e273fb128365e61bd16 not found: ID does not exist" containerID="ce6347ca0d9a36501ea0e59d9f301d0336f2860eafb71e273fb128365e61bd16" Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.792833 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce6347ca0d9a36501ea0e59d9f301d0336f2860eafb71e273fb128365e61bd16"} err="failed to get container status \"ce6347ca0d9a36501ea0e59d9f301d0336f2860eafb71e273fb128365e61bd16\": rpc error: code = NotFound desc = could not find container \"ce6347ca0d9a36501ea0e59d9f301d0336f2860eafb71e273fb128365e61bd16\": container with ID starting with ce6347ca0d9a36501ea0e59d9f301d0336f2860eafb71e273fb128365e61bd16 not found: ID does not exist" Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.792864 4622 scope.go:117] "RemoveContainer" containerID="017ee552db5f1753e1cd0d20dde2b82424fd2337e5c767ea53df5ca10335a82b" Nov 26 11:49:01 crc kubenswrapper[4622]: E1126 11:49:01.793096 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"017ee552db5f1753e1cd0d20dde2b82424fd2337e5c767ea53df5ca10335a82b\": container with ID starting with 017ee552db5f1753e1cd0d20dde2b82424fd2337e5c767ea53df5ca10335a82b not found: ID does not exist" containerID="017ee552db5f1753e1cd0d20dde2b82424fd2337e5c767ea53df5ca10335a82b" Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.793136 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"017ee552db5f1753e1cd0d20dde2b82424fd2337e5c767ea53df5ca10335a82b"} err="failed to get container status \"017ee552db5f1753e1cd0d20dde2b82424fd2337e5c767ea53df5ca10335a82b\": rpc error: code = NotFound desc = could not find container 
\"017ee552db5f1753e1cd0d20dde2b82424fd2337e5c767ea53df5ca10335a82b\": container with ID starting with 017ee552db5f1753e1cd0d20dde2b82424fd2337e5c767ea53df5ca10335a82b not found: ID does not exist" Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.793153 4622 scope.go:117] "RemoveContainer" containerID="fa4f411153a254611c2536b29f03dd69c25d4b626614a40b76990891c62c6387" Nov 26 11:49:01 crc kubenswrapper[4622]: E1126 11:49:01.793418 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa4f411153a254611c2536b29f03dd69c25d4b626614a40b76990891c62c6387\": container with ID starting with fa4f411153a254611c2536b29f03dd69c25d4b626614a40b76990891c62c6387 not found: ID does not exist" containerID="fa4f411153a254611c2536b29f03dd69c25d4b626614a40b76990891c62c6387" Nov 26 11:49:01 crc kubenswrapper[4622]: I1126 11:49:01.793446 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa4f411153a254611c2536b29f03dd69c25d4b626614a40b76990891c62c6387"} err="failed to get container status \"fa4f411153a254611c2536b29f03dd69c25d4b626614a40b76990891c62c6387\": rpc error: code = NotFound desc = could not find container \"fa4f411153a254611c2536b29f03dd69c25d4b626614a40b76990891c62c6387\": container with ID starting with fa4f411153a254611c2536b29f03dd69c25d4b626614a40b76990891c62c6387 not found: ID does not exist" Nov 26 11:49:02 crc kubenswrapper[4622]: I1126 11:49:02.715161 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5af4216a-f906-41bb-be3a-9585f4ef4e66" path="/var/lib/kubelet/pods/5af4216a-f906-41bb-be3a-9585f4ef4e66/volumes" Nov 26 11:49:10 crc kubenswrapper[4622]: I1126 11:49:10.711034 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" Nov 26 11:49:10 crc kubenswrapper[4622]: E1126 11:49:10.712300 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:49:22 crc kubenswrapper[4622]: I1126 11:49:22.707486 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" Nov 26 11:49:22 crc kubenswrapper[4622]: E1126 11:49:22.708743 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:49:36 crc kubenswrapper[4622]: I1126 11:49:36.706353 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" Nov 26 11:49:36 crc kubenswrapper[4622]: E1126 11:49:36.707380 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:49:50 crc kubenswrapper[4622]: I1126 11:49:50.712233 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" Nov 26 11:49:50 crc kubenswrapper[4622]: E1126 11:49:50.713619 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:50:03 crc kubenswrapper[4622]: I1126 11:50:03.294638 4622 generic.go:334] "Generic (PLEG): container finished" podID="61960c03-d6c3-417c-8445-a485f622d6b1" containerID="11843b92d298906be65383abee9cad785f4673a0514281aefd6ea4d9d1891dd3" exitCode=0 Nov 26 11:50:03 crc kubenswrapper[4622]: I1126 11:50:03.294736 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5" event={"ID":"61960c03-d6c3-417c-8445-a485f622d6b1","Type":"ContainerDied","Data":"11843b92d298906be65383abee9cad785f4673a0514281aefd6ea4d9d1891dd3"} Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.639145 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5" Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.698017 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-libvirt-secret-0\") pod \"61960c03-d6c3-417c-8445-a485f622d6b1\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.698065 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-inventory\") pod \"61960c03-d6c3-417c-8445-a485f622d6b1\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.698182 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-libvirt-combined-ca-bundle\") pod \"61960c03-d6c3-417c-8445-a485f622d6b1\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.698290 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-ssh-key\") pod \"61960c03-d6c3-417c-8445-a485f622d6b1\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.698371 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9p2p\" (UniqueName: \"kubernetes.io/projected/61960c03-d6c3-417c-8445-a485f622d6b1-kube-api-access-r9p2p\") pod \"61960c03-d6c3-417c-8445-a485f622d6b1\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.698439 4622 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-ceph\") pod \"61960c03-d6c3-417c-8445-a485f622d6b1\" (UID: \"61960c03-d6c3-417c-8445-a485f622d6b1\") " Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.704847 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61960c03-d6c3-417c-8445-a485f622d6b1-kube-api-access-r9p2p" (OuterVolumeSpecName: "kube-api-access-r9p2p") pod "61960c03-d6c3-417c-8445-a485f622d6b1" (UID: "61960c03-d6c3-417c-8445-a485f622d6b1"). InnerVolumeSpecName "kube-api-access-r9p2p". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.705634 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "61960c03-d6c3-417c-8445-a485f622d6b1" (UID: "61960c03-d6c3-417c-8445-a485f622d6b1"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.706398 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-ceph" (OuterVolumeSpecName: "ceph") pod "61960c03-d6c3-417c-8445-a485f622d6b1" (UID: "61960c03-d6c3-417c-8445-a485f622d6b1"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.728312 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "61960c03-d6c3-417c-8445-a485f622d6b1" (UID: "61960c03-d6c3-417c-8445-a485f622d6b1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.728795 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-inventory" (OuterVolumeSpecName: "inventory") pod "61960c03-d6c3-417c-8445-a485f622d6b1" (UID: "61960c03-d6c3-417c-8445-a485f622d6b1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.733732 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "61960c03-d6c3-417c-8445-a485f622d6b1" (UID: "61960c03-d6c3-417c-8445-a485f622d6b1"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.801619 4622 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.801658 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.801674 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9p2p\" (UniqueName: \"kubernetes.io/projected/61960c03-d6c3-417c-8445-a485f622d6b1-kube-api-access-r9p2p\") on node \"crc\" DevicePath \"\"" Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.801685 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.801699 4622 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Nov 26 11:50:04 crc kubenswrapper[4622]: I1126 11:50:04.801711 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61960c03-d6c3-417c-8445-a485f622d6b1-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.315746 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5" event={"ID":"61960c03-d6c3-417c-8445-a485f622d6b1","Type":"ContainerDied","Data":"5074466cce3cd0ae3490349e0e4fa631a564563c8027cf537a784dcca57563ca"} Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.315800 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5074466cce3cd0ae3490349e0e4fa631a564563c8027cf537a784dcca57563ca" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.316238 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.398422 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg"] Nov 26 11:50:05 crc kubenswrapper[4622]: E1126 11:50:05.398824 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5af4216a-f906-41bb-be3a-9585f4ef4e66" containerName="extract-utilities" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.398846 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="5af4216a-f906-41bb-be3a-9585f4ef4e66" containerName="extract-utilities" Nov 26 11:50:05 crc kubenswrapper[4622]: E1126 11:50:05.398880 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5af4216a-f906-41bb-be3a-9585f4ef4e66" containerName="registry-server" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.398887 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="5af4216a-f906-41bb-be3a-9585f4ef4e66" containerName="registry-server" Nov 26 11:50:05 crc kubenswrapper[4622]: E1126 11:50:05.398904 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5af4216a-f906-41bb-be3a-9585f4ef4e66" containerName="extract-content" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.398909 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="5af4216a-f906-41bb-be3a-9585f4ef4e66" containerName="extract-content" Nov 26 11:50:05 crc kubenswrapper[4622]: E1126 11:50:05.398929 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61960c03-d6c3-417c-8445-a485f622d6b1" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.398935 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="61960c03-d6c3-417c-8445-a485f622d6b1" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.399116 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="61960c03-d6c3-417c-8445-a485f622d6b1" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.399141 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="5af4216a-f906-41bb-be3a-9585f4ef4e66" containerName="registry-server" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.399798 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.401493 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.402407 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.402642 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ceph-nova" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.402702 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.406347 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.406396 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.406428 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5mkm8" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.407613 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.413652 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.433088 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg"] Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.517610 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.518657 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.518810 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.518873 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-cell1-compute-config-1\") pod 
\"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.518914 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.519015 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.519051 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.519190 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.519298 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdvsm\" (UniqueName: \"kubernetes.io/projected/a8397a46-af89-4e1f-90fe-34d0d980d7a6-kube-api-access-qdvsm\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.519484 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.519710 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 
11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.620813 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.621198 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.621228 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.621251 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.621280 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdvsm\" (UniqueName: \"kubernetes.io/projected/a8397a46-af89-4e1f-90fe-34d0d980d7a6-kube-api-access-qdvsm\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.621332 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.621391 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.621420 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-migration-ssh-key-1\") pod 
\"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.621444 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.621490 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.621542 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.622803 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.623002 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.627994 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.628225 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.628308 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.628968 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.629067 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.629788 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.630307 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.630872 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.640122 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdvsm\" (UniqueName: \"kubernetes.io/projected/a8397a46-af89-4e1f-90fe-34d0d980d7a6-kube-api-access-qdvsm\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.706100 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" Nov 26 11:50:05 crc kubenswrapper[4622]: E1126 11:50:05.706413 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:50:05 crc kubenswrapper[4622]: I1126 11:50:05.715841 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:50:06 crc kubenswrapper[4622]: I1126 11:50:06.277832 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg"] Nov 26 11:50:06 crc kubenswrapper[4622]: I1126 11:50:06.324423 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" event={"ID":"a8397a46-af89-4e1f-90fe-34d0d980d7a6","Type":"ContainerStarted","Data":"380a59f291ffdae493935621690e0eb5a7434ebcb6739ee2c9d12d9ce9833bea"} Nov 26 11:50:08 crc kubenswrapper[4622]: I1126 11:50:08.344194 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" event={"ID":"a8397a46-af89-4e1f-90fe-34d0d980d7a6","Type":"ContainerStarted","Data":"426cd2d3ad21eefa1bbfda5cd2e50bb1d934e8bb9ffe7d5ccaa886d0069343e1"} Nov 26 11:50:08 crc kubenswrapper[4622]: I1126 11:50:08.365781 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" podStartSLOduration=2.5101667130000003 podStartE2EDuration="3.365765252s" podCreationTimestamp="2025-11-26 11:50:05 +0000 UTC" firstStartedPulling="2025-11-26 11:50:06.284321616 +0000 UTC m=+2365.875533138" lastFinishedPulling="2025-11-26 11:50:07.139920156 +0000 UTC m=+2366.731131677" observedRunningTime="2025-11-26 11:50:08.362188917 +0000 UTC m=+2367.953400439" watchObservedRunningTime="2025-11-26 11:50:08.365765252 +0000 UTC m=+2367.956976774" Nov 26 11:50:18 crc kubenswrapper[4622]: I1126 11:50:18.706780 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" Nov 26 11:50:18 crc kubenswrapper[4622]: E1126 11:50:18.707588 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:50:31 crc kubenswrapper[4622]: I1126 11:50:31.706445 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" Nov 26 11:50:31 crc kubenswrapper[4622]: E1126 11:50:31.707428 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:50:42 crc kubenswrapper[4622]: I1126 11:50:42.707075 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" Nov 26 11:50:42 crc kubenswrapper[4622]: E1126 11:50:42.708021 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:50:55 crc kubenswrapper[4622]: I1126 11:50:55.707156 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" Nov 26 11:50:55 crc kubenswrapper[4622]: E1126 11:50:55.708316 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:51:07 crc kubenswrapper[4622]: I1126 11:51:07.706281 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" Nov 26 11:51:07 crc kubenswrapper[4622]: E1126 11:51:07.707550 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:51:19 crc kubenswrapper[4622]: I1126 11:51:19.707027 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" Nov 26 11:51:19 crc kubenswrapper[4622]: E1126 11:51:19.708072 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:51:32 crc kubenswrapper[4622]: I1126 11:51:32.706339 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" Nov 26 11:51:32 crc kubenswrapper[4622]: E1126 11:51:32.707519 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:51:44 crc kubenswrapper[4622]: I1126 11:51:44.707183 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" Nov 26 11:51:44 crc kubenswrapper[4622]: E1126 11:51:44.708240 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:51:57 crc kubenswrapper[4622]: I1126 11:51:57.706788 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" Nov 26 11:51:58 crc kubenswrapper[4622]: I1126 11:51:58.368027 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"5962ee80bea408a494d1df1a7b2f224fb827333c03df82cf77066082ed5701b6"} Nov 26 11:52:15 crc kubenswrapper[4622]: I1126 11:52:15.533811 4622 generic.go:334] "Generic (PLEG): container finished" podID="a8397a46-af89-4e1f-90fe-34d0d980d7a6" containerID="426cd2d3ad21eefa1bbfda5cd2e50bb1d934e8bb9ffe7d5ccaa886d0069343e1" exitCode=0 Nov 26 11:52:15 crc kubenswrapper[4622]: I1126 11:52:15.533896 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" event={"ID":"a8397a46-af89-4e1f-90fe-34d0d980d7a6","Type":"ContainerDied","Data":"426cd2d3ad21eefa1bbfda5cd2e50bb1d934e8bb9ffe7d5ccaa886d0069343e1"} Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.884259 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.910268 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ceph\") pod \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.910313 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-cell1-compute-config-0\") pod \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.910396 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-cell1-compute-config-1\") pod \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.910415 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-migration-ssh-key-1\") pod \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.910483 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ceph-nova-0\") pod \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.911325 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-custom-ceph-combined-ca-bundle\") pod \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.911366 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ssh-key\") pod \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.911399 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-inventory\") pod \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.911530 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-extra-config-0\") pod \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.911589 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-migration-ssh-key-0\") pod \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.911613 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qdvsm\" (UniqueName: \"kubernetes.io/projected/a8397a46-af89-4e1f-90fe-34d0d980d7a6-kube-api-access-qdvsm\") pod \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\" (UID: \"a8397a46-af89-4e1f-90fe-34d0d980d7a6\") " Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.916253 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8397a46-af89-4e1f-90fe-34d0d980d7a6-kube-api-access-qdvsm" (OuterVolumeSpecName: "kube-api-access-qdvsm") pod "a8397a46-af89-4e1f-90fe-34d0d980d7a6" (UID: "a8397a46-af89-4e1f-90fe-34d0d980d7a6"). InnerVolumeSpecName "kube-api-access-qdvsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.916662 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ceph" (OuterVolumeSpecName: "ceph") pod "a8397a46-af89-4e1f-90fe-34d0d980d7a6" (UID: "a8397a46-af89-4e1f-90fe-34d0d980d7a6"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.924588 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-custom-ceph-combined-ca-bundle" (OuterVolumeSpecName: "nova-custom-ceph-combined-ca-bundle") pod "a8397a46-af89-4e1f-90fe-34d0d980d7a6" (UID: "a8397a46-af89-4e1f-90fe-34d0d980d7a6"). InnerVolumeSpecName "nova-custom-ceph-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.942062 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "a8397a46-af89-4e1f-90fe-34d0d980d7a6" (UID: "a8397a46-af89-4e1f-90fe-34d0d980d7a6"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.942899 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "a8397a46-af89-4e1f-90fe-34d0d980d7a6" (UID: "a8397a46-af89-4e1f-90fe-34d0d980d7a6"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.943313 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "a8397a46-af89-4e1f-90fe-34d0d980d7a6" (UID: "a8397a46-af89-4e1f-90fe-34d0d980d7a6"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.943419 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ceph-nova-0" (OuterVolumeSpecName: "ceph-nova-0") pod "a8397a46-af89-4e1f-90fe-34d0d980d7a6" (UID: "a8397a46-af89-4e1f-90fe-34d0d980d7a6"). InnerVolumeSpecName "ceph-nova-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.944566 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "a8397a46-af89-4e1f-90fe-34d0d980d7a6" (UID: "a8397a46-af89-4e1f-90fe-34d0d980d7a6"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.945457 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "a8397a46-af89-4e1f-90fe-34d0d980d7a6" (UID: "a8397a46-af89-4e1f-90fe-34d0d980d7a6"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.947049 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-inventory" (OuterVolumeSpecName: "inventory") pod "a8397a46-af89-4e1f-90fe-34d0d980d7a6" (UID: "a8397a46-af89-4e1f-90fe-34d0d980d7a6"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:16 crc kubenswrapper[4622]: I1126 11:52:16.948430 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a8397a46-af89-4e1f-90fe-34d0d980d7a6" (UID: "a8397a46-af89-4e1f-90fe-34d0d980d7a6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:17 crc kubenswrapper[4622]: I1126 11:52:17.014197 4622 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:17 crc kubenswrapper[4622]: I1126 11:52:17.014229 4622 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:17 crc kubenswrapper[4622]: I1126 11:52:17.014265 4622 reconciler_common.go:293] "Volume detached for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ceph-nova-0\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:17 crc kubenswrapper[4622]: I1126 11:52:17.014279 4622 reconciler_common.go:293] "Volume detached for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-custom-ceph-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:17 crc kubenswrapper[4622]: I1126 11:52:17.014292 4622 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:17 crc kubenswrapper[4622]: I1126 11:52:17.014301 4622 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-inventory\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:17 crc kubenswrapper[4622]: I1126 11:52:17.014309 4622 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:17 crc kubenswrapper[4622]: I1126 11:52:17.014319 4622 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:17 crc kubenswrapper[4622]: I1126 11:52:17.014328 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qdvsm\" (UniqueName: \"kubernetes.io/projected/a8397a46-af89-4e1f-90fe-34d0d980d7a6-kube-api-access-qdvsm\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:17 crc kubenswrapper[4622]: I1126 11:52:17.014335 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:17 crc kubenswrapper[4622]: I1126 11:52:17.014344 4622 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a8397a46-af89-4e1f-90fe-34d0d980d7a6-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:17 crc kubenswrapper[4622]: I1126 
11:52:17.558964 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" event={"ID":"a8397a46-af89-4e1f-90fe-34d0d980d7a6","Type":"ContainerDied","Data":"380a59f291ffdae493935621690e0eb5a7434ebcb6739ee2c9d12d9ce9833bea"} Nov 26 11:52:17 crc kubenswrapper[4622]: I1126 11:52:17.559030 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="380a59f291ffdae493935621690e0eb5a7434ebcb6739ee2c9d12d9ce9833bea" Nov 26 11:52:17 crc kubenswrapper[4622]: I1126 11:52:17.559033 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.176766 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Nov 26 11:52:29 crc kubenswrapper[4622]: E1126 11:52:29.177815 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8397a46-af89-4e1f-90fe-34d0d980d7a6" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.177833 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8397a46-af89-4e1f-90fe-34d0d980d7a6" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.178082 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8397a46-af89-4e1f-90fe-34d0d980d7a6" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.179014 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.181237 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.186320 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.200004 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.201866 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.203456 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.216104 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.224638 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236639 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f0ed356-b135-473d-ad4c-8e21c287af13-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236686 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236707 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-etc-nvme\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236735 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-dev\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236755 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-dev\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236772 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236793 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236816 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " 
pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236831 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-sys\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236863 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-sys\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236883 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/950a31f4-607a-4c61-aa2c-606959f96b3b-config-data\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236897 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236918 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/950a31f4-607a-4c61-aa2c-606959f96b3b-ceph\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236936 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f0ed356-b135-473d-ad4c-8e21c287af13-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236958 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.236986 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcbm5\" (UniqueName: \"kubernetes.io/projected/9f0ed356-b135-473d-ad4c-8e21c287af13-kube-api-access-pcbm5\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237010 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237027 4622 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/950a31f4-607a-4c61-aa2c-606959f96b3b-config-data-custom\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237059 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/950a31f4-607a-4c61-aa2c-606959f96b3b-scripts\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237081 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237103 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9f0ed356-b135-473d-ad4c-8e21c287af13-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237120 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f0ed356-b135-473d-ad4c-8e21c287af13-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237148 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8h68\" (UniqueName: \"kubernetes.io/projected/950a31f4-607a-4c61-aa2c-606959f96b3b-kube-api-access-w8h68\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237168 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-lib-modules\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237193 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-run\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237224 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/950a31f4-607a-4c61-aa2c-606959f96b3b-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237251 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"run\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-run\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237264 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237281 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f0ed356-b135-473d-ad4c-8e21c287af13-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237302 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237315 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.237329 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338161 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338210 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338242 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338293 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f0ed356-b135-473d-ad4c-8e21c287af13-scripts\") pod 
\"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338321 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338345 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-etc-nvme\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338352 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338400 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-dev\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338367 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-dev\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338446 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338479 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-dev\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338536 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338468 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338573 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: 
\"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338576 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338666 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-etc-nvme\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338619 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338617 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-dev\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338782 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338818 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338871 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-sys\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338914 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.338989 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-sys\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339023 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: 
\"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-sys\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339054 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/950a31f4-607a-4c61-aa2c-606959f96b3b-config-data\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339056 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-sys\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339082 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339127 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/950a31f4-607a-4c61-aa2c-606959f96b3b-ceph\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339164 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f0ed356-b135-473d-ad4c-8e21c287af13-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339191 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339197 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339269 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339295 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcbm5\" (UniqueName: \"kubernetes.io/projected/9f0ed356-b135-473d-ad4c-8e21c287af13-kube-api-access-pcbm5\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339455 4622 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339530 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/950a31f4-607a-4c61-aa2c-606959f96b3b-config-data-custom\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339550 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339565 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/950a31f4-607a-4c61-aa2c-606959f96b3b-scripts\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339624 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339672 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9f0ed356-b135-473d-ad4c-8e21c287af13-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339718 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f0ed356-b135-473d-ad4c-8e21c287af13-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339798 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8h68\" (UniqueName: \"kubernetes.io/projected/950a31f4-607a-4c61-aa2c-606959f96b3b-kube-api-access-w8h68\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339831 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-lib-modules\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.339899 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-run\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc 
kubenswrapper[4622]: I1126 11:52:29.340001 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/950a31f4-607a-4c61-aa2c-606959f96b3b-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.340081 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-run\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.340110 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.340133 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f0ed356-b135-473d-ad4c-8e21c287af13-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.340650 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-lib-modules\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.340694 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.341662 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-run\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.341780 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/950a31f4-607a-4c61-aa2c-606959f96b3b-run\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.341848 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/9f0ed356-b135-473d-ad4c-8e21c287af13-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.346347 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f0ed356-b135-473d-ad4c-8e21c287af13-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " 
pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.346752 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/950a31f4-607a-4c61-aa2c-606959f96b3b-ceph\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.347204 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/950a31f4-607a-4c61-aa2c-606959f96b3b-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.347844 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f0ed356-b135-473d-ad4c-8e21c287af13-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.348367 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9f0ed356-b135-473d-ad4c-8e21c287af13-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.348560 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/950a31f4-607a-4c61-aa2c-606959f96b3b-config-data-custom\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.349865 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/950a31f4-607a-4c61-aa2c-606959f96b3b-config-data\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.350466 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f0ed356-b135-473d-ad4c-8e21c287af13-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.354717 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f0ed356-b135-473d-ad4c-8e21c287af13-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.359730 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcbm5\" (UniqueName: \"kubernetes.io/projected/9f0ed356-b135-473d-ad4c-8e21c287af13-kube-api-access-pcbm5\") pod \"cinder-volume-volume1-0\" (UID: \"9f0ed356-b135-473d-ad4c-8e21c287af13\") " pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.360457 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/950a31f4-607a-4c61-aa2c-606959f96b3b-scripts\") pod \"cinder-backup-0\" (UID: 
\"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.368302 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8h68\" (UniqueName: \"kubernetes.io/projected/950a31f4-607a-4c61-aa2c-606959f96b3b-kube-api-access-w8h68\") pod \"cinder-backup-0\" (UID: \"950a31f4-607a-4c61-aa2c-606959f96b3b\") " pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.496316 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.526958 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.779645 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-7kxg5"] Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.781514 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-7kxg5" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.792491 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-7kxg5"] Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.881992 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8tl5\" (UniqueName: \"kubernetes.io/projected/a36f9d11-1586-4876-afeb-a197d5479f7c-kube-api-access-x8tl5\") pod \"manila-db-create-7kxg5\" (UID: \"a36f9d11-1586-4876-afeb-a197d5479f7c\") " pod="openstack/manila-db-create-7kxg5" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.882341 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a36f9d11-1586-4876-afeb-a197d5479f7c-operator-scripts\") pod \"manila-db-create-7kxg5\" (UID: \"a36f9d11-1586-4876-afeb-a197d5479f7c\") " pod="openstack/manila-db-create-7kxg5" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.882632 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-9ad5-account-create-update-sg5r5"] Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.884091 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-9ad5-account-create-update-sg5r5" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.886015 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.890538 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-9ad5-account-create-update-sg5r5"] Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.984128 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2b956c55-fa7f-474e-a13a-a410f68ce795-operator-scripts\") pod \"manila-9ad5-account-create-update-sg5r5\" (UID: \"2b956c55-fa7f-474e-a13a-a410f68ce795\") " pod="openstack/manila-9ad5-account-create-update-sg5r5" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.984232 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8tl5\" (UniqueName: \"kubernetes.io/projected/a36f9d11-1586-4876-afeb-a197d5479f7c-kube-api-access-x8tl5\") pod \"manila-db-create-7kxg5\" (UID: \"a36f9d11-1586-4876-afeb-a197d5479f7c\") " pod="openstack/manila-db-create-7kxg5" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.984302 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a36f9d11-1586-4876-afeb-a197d5479f7c-operator-scripts\") pod \"manila-db-create-7kxg5\" (UID: \"a36f9d11-1586-4876-afeb-a197d5479f7c\") " pod="openstack/manila-db-create-7kxg5" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.984336 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnktm\" (UniqueName: \"kubernetes.io/projected/2b956c55-fa7f-474e-a13a-a410f68ce795-kube-api-access-lnktm\") pod \"manila-9ad5-account-create-update-sg5r5\" (UID: \"2b956c55-fa7f-474e-a13a-a410f68ce795\") " pod="openstack/manila-9ad5-account-create-update-sg5r5" Nov 26 11:52:29 crc kubenswrapper[4622]: I1126 11:52:29.985659 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a36f9d11-1586-4876-afeb-a197d5479f7c-operator-scripts\") pod \"manila-db-create-7kxg5\" (UID: \"a36f9d11-1586-4876-afeb-a197d5479f7c\") " pod="openstack/manila-db-create-7kxg5" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.003395 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8tl5\" (UniqueName: \"kubernetes.io/projected/a36f9d11-1586-4876-afeb-a197d5479f7c-kube-api-access-x8tl5\") pod \"manila-db-create-7kxg5\" (UID: \"a36f9d11-1586-4876-afeb-a197d5479f7c\") " pod="openstack/manila-db-create-7kxg5" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.046004 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.047738 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.051000 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-62dwh" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.051067 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.051016 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.051423 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.057430 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.089106 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4vj6\" (UniqueName: \"kubernetes.io/projected/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-kube-api-access-c4vj6\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.089195 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnktm\" (UniqueName: \"kubernetes.io/projected/2b956c55-fa7f-474e-a13a-a410f68ce795-kube-api-access-lnktm\") pod \"manila-9ad5-account-create-update-sg5r5\" (UID: \"2b956c55-fa7f-474e-a13a-a410f68ce795\") " pod="openstack/manila-9ad5-account-create-update-sg5r5" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.089248 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-logs\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.089351 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-ceph\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.089370 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.089428 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-config-data\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.089542 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-scripts\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.089571 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2b956c55-fa7f-474e-a13a-a410f68ce795-operator-scripts\") pod \"manila-9ad5-account-create-update-sg5r5\" (UID: \"2b956c55-fa7f-474e-a13a-a410f68ce795\") " pod="openstack/manila-9ad5-account-create-update-sg5r5" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.089613 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.089643 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.089736 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.090873 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2b956c55-fa7f-474e-a13a-a410f68ce795-operator-scripts\") pod \"manila-9ad5-account-create-update-sg5r5\" (UID: \"2b956c55-fa7f-474e-a13a-a410f68ce795\") " pod="openstack/manila-9ad5-account-create-update-sg5r5" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.098007 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.100031 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.101415 4622 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.103926 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.104194 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.104607 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.108191 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-7kxg5" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.108251 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnktm\" (UniqueName: \"kubernetes.io/projected/2b956c55-fa7f-474e-a13a-a410f68ce795-kube-api-access-lnktm\") pod \"manila-9ad5-account-create-update-sg5r5\" (UID: \"2b956c55-fa7f-474e-a13a-a410f68ce795\") " pod="openstack/manila-9ad5-account-create-update-sg5r5" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.112348 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.191770 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-scripts\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.191850 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6cb4914-4902-4e5a-8c29-34f4136736f0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.191886 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.191926 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192062 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6cb4914-4902-4e5a-8c29-34f4136736f0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192138 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e6cb4914-4902-4e5a-8c29-34f4136736f0-ceph\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192183 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192203 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192222 4622 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192237 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6cb4914-4902-4e5a-8c29-34f4136736f0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192258 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e6cb4914-4902-4e5a-8c29-34f4136736f0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192317 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4vj6\" (UniqueName: \"kubernetes.io/projected/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-kube-api-access-c4vj6\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192362 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6cb4914-4902-4e5a-8c29-34f4136736f0-logs\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192374 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192560 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-logs\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192749 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6cb4914-4902-4e5a-8c29-34f4136736f0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192788 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hp7n4\" 
(UniqueName: \"kubernetes.io/projected/e6cb4914-4902-4e5a-8c29-34f4136736f0-kube-api-access-hp7n4\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192817 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-ceph\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192820 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-logs\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192835 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.192923 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-config-data\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.195715 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-scripts\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.199738 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.200131 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-9ad5-account-create-update-sg5r5" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.202004 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-ceph\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.202468 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.202789 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-config-data\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.211746 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4vj6\" (UniqueName: \"kubernetes.io/projected/5cad4cf3-7cf8-43eb-b48d-954bb0d60d03-kube-api-access-c4vj6\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.222914 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03\") " pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.295067 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6cb4914-4902-4e5a-8c29-34f4136736f0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.295127 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e6cb4914-4902-4e5a-8c29-34f4136736f0-ceph\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.295156 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.295183 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6cb4914-4902-4e5a-8c29-34f4136736f0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.295209 4622 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e6cb4914-4902-4e5a-8c29-34f4136736f0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.295265 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6cb4914-4902-4e5a-8c29-34f4136736f0-logs\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.295352 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6cb4914-4902-4e5a-8c29-34f4136736f0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.295380 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hp7n4\" (UniqueName: \"kubernetes.io/projected/e6cb4914-4902-4e5a-8c29-34f4136736f0-kube-api-access-hp7n4\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.295490 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6cb4914-4902-4e5a-8c29-34f4136736f0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.295895 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e6cb4914-4902-4e5a-8c29-34f4136736f0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.298909 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6cb4914-4902-4e5a-8c29-34f4136736f0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.299913 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e6cb4914-4902-4e5a-8c29-34f4136736f0-ceph\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.300051 4622 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.300393 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/e6cb4914-4902-4e5a-8c29-34f4136736f0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.300688 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6cb4914-4902-4e5a-8c29-34f4136736f0-logs\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.303388 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6cb4914-4902-4e5a-8c29-34f4136736f0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.303780 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6cb4914-4902-4e5a-8c29-34f4136736f0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.317137 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hp7n4\" (UniqueName: \"kubernetes.io/projected/e6cb4914-4902-4e5a-8c29-34f4136736f0-kube-api-access-hp7n4\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.321387 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"e6cb4914-4902-4e5a-8c29-34f4136736f0\") " pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.364985 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.420110 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.502637 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-7kxg5"] Nov 26 11:52:30 crc kubenswrapper[4622]: W1126 11:52:30.516474 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda36f9d11_1586_4876_afeb_a197d5479f7c.slice/crio-5be343009bda5eef28c9fe6b80c7c61c3ddd2b4bfc9cb0c82a3db5566cc72116 WatchSource:0}: Error finding container 5be343009bda5eef28c9fe6b80c7c61c3ddd2b4bfc9cb0c82a3db5566cc72116: Status 404 returned error can't find the container with id 5be343009bda5eef28c9fe6b80c7c61c3ddd2b4bfc9cb0c82a3db5566cc72116 Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.588732 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-9ad5-account-create-update-sg5r5"] Nov 26 11:52:30 crc kubenswrapper[4622]: W1126 11:52:30.603927 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b956c55_fa7f_474e_a13a_a410f68ce795.slice/crio-09ed9ca93bafdb9e38761fbdfac2eb7ff6ffd27467b08e2f547fade1fd1d4bd1 WatchSource:0}: Error finding container 09ed9ca93bafdb9e38761fbdfac2eb7ff6ffd27467b08e2f547fade1fd1d4bd1: Status 404 returned error can't find the container with id 09ed9ca93bafdb9e38761fbdfac2eb7ff6ffd27467b08e2f547fade1fd1d4bd1 Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.625187 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Nov 26 11:52:30 crc kubenswrapper[4622]: W1126 11:52:30.631376 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f0ed356_b135_473d_ad4c_8e21c287af13.slice/crio-13904113b6ed95f10a9d031328eaf168c3681fea5b7b70a2f20be4604b93511d WatchSource:0}: Error finding container 13904113b6ed95f10a9d031328eaf168c3681fea5b7b70a2f20be4604b93511d: Status 404 returned error can't find the container with id 13904113b6ed95f10a9d031328eaf168c3681fea5b7b70a2f20be4604b93511d Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.690766 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-9ad5-account-create-update-sg5r5" event={"ID":"2b956c55-fa7f-474e-a13a-a410f68ce795","Type":"ContainerStarted","Data":"09ed9ca93bafdb9e38761fbdfac2eb7ff6ffd27467b08e2f547fade1fd1d4bd1"} Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.692446 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"9f0ed356-b135-473d-ad4c-8e21c287af13","Type":"ContainerStarted","Data":"13904113b6ed95f10a9d031328eaf168c3681fea5b7b70a2f20be4604b93511d"} Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.694069 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-7kxg5" event={"ID":"a36f9d11-1586-4876-afeb-a197d5479f7c","Type":"ContainerStarted","Data":"cc98ab4c4c58f1442afda7cfe63b3f049bad159cd4f14eda5844ec95e451f08f"} Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.694097 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-7kxg5" event={"ID":"a36f9d11-1586-4876-afeb-a197d5479f7c","Type":"ContainerStarted","Data":"5be343009bda5eef28c9fe6b80c7c61c3ddd2b4bfc9cb0c82a3db5566cc72116"} Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.695730 4622 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/cinder-backup-0" event={"ID":"950a31f4-607a-4c61-aa2c-606959f96b3b","Type":"ContainerStarted","Data":"b2d70070efaf1b8062baf9448a319295dde2a333a3832ac1a2ba071b8744c463"} Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.714280 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-create-7kxg5" podStartSLOduration=1.7142594249999998 podStartE2EDuration="1.714259425s" podCreationTimestamp="2025-11-26 11:52:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:52:30.709331082 +0000 UTC m=+2510.300542605" watchObservedRunningTime="2025-11-26 11:52:30.714259425 +0000 UTC m=+2510.305470948" Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.867775 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 26 11:52:30 crc kubenswrapper[4622]: W1126 11:52:30.880707 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5cad4cf3_7cf8_43eb_b48d_954bb0d60d03.slice/crio-8b70751b74182544ed7d720da9bd22a6418fb12f730124564ae1302082f45ad6 WatchSource:0}: Error finding container 8b70751b74182544ed7d720da9bd22a6418fb12f730124564ae1302082f45ad6: Status 404 returned error can't find the container with id 8b70751b74182544ed7d720da9bd22a6418fb12f730124564ae1302082f45ad6 Nov 26 11:52:30 crc kubenswrapper[4622]: I1126 11:52:30.973570 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 26 11:52:31 crc kubenswrapper[4622]: W1126 11:52:31.003037 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode6cb4914_4902_4e5a_8c29_34f4136736f0.slice/crio-fa51e114b6df4d018aacee8ec5f51039b3fbbc23fec7fc15370aaa5b0b0a3b0d WatchSource:0}: Error finding container fa51e114b6df4d018aacee8ec5f51039b3fbbc23fec7fc15370aaa5b0b0a3b0d: Status 404 returned error can't find the container with id fa51e114b6df4d018aacee8ec5f51039b3fbbc23fec7fc15370aaa5b0b0a3b0d Nov 26 11:52:31 crc kubenswrapper[4622]: I1126 11:52:31.772711 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03","Type":"ContainerStarted","Data":"9848717af832bcdbdfe35b901965f15914b9a560402d0c2029964db13fdbffeb"} Nov 26 11:52:31 crc kubenswrapper[4622]: I1126 11:52:31.773255 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03","Type":"ContainerStarted","Data":"8b70751b74182544ed7d720da9bd22a6418fb12f730124564ae1302082f45ad6"} Nov 26 11:52:31 crc kubenswrapper[4622]: I1126 11:52:31.787370 4622 generic.go:334] "Generic (PLEG): container finished" podID="2b956c55-fa7f-474e-a13a-a410f68ce795" containerID="2eed1bb67ac4da101e35ac2f5d3ac53bba65cfb9ffb791ec07827d2a7a3038e8" exitCode=0 Nov 26 11:52:31 crc kubenswrapper[4622]: I1126 11:52:31.787445 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-9ad5-account-create-update-sg5r5" event={"ID":"2b956c55-fa7f-474e-a13a-a410f68ce795","Type":"ContainerDied","Data":"2eed1bb67ac4da101e35ac2f5d3ac53bba65cfb9ffb791ec07827d2a7a3038e8"} Nov 26 11:52:31 crc kubenswrapper[4622]: I1126 11:52:31.807085 4622 generic.go:334] "Generic (PLEG): container finished" podID="a36f9d11-1586-4876-afeb-a197d5479f7c" 
containerID="cc98ab4c4c58f1442afda7cfe63b3f049bad159cd4f14eda5844ec95e451f08f" exitCode=0 Nov 26 11:52:31 crc kubenswrapper[4622]: I1126 11:52:31.807154 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-7kxg5" event={"ID":"a36f9d11-1586-4876-afeb-a197d5479f7c","Type":"ContainerDied","Data":"cc98ab4c4c58f1442afda7cfe63b3f049bad159cd4f14eda5844ec95e451f08f"} Nov 26 11:52:31 crc kubenswrapper[4622]: I1126 11:52:31.817145 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e6cb4914-4902-4e5a-8c29-34f4136736f0","Type":"ContainerStarted","Data":"8891192e9d5a333edc3f511f36d09784344622657ef5d1bf3d42aaaf2f45b8ba"} Nov 26 11:52:31 crc kubenswrapper[4622]: I1126 11:52:31.817174 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e6cb4914-4902-4e5a-8c29-34f4136736f0","Type":"ContainerStarted","Data":"fa51e114b6df4d018aacee8ec5f51039b3fbbc23fec7fc15370aaa5b0b0a3b0d"} Nov 26 11:52:32 crc kubenswrapper[4622]: I1126 11:52:32.829385 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"950a31f4-607a-4c61-aa2c-606959f96b3b","Type":"ContainerStarted","Data":"8105805f6d075845da5dca9062c9f5ae7a491db9faffc5e9f85aca111f0417e9"} Nov 26 11:52:32 crc kubenswrapper[4622]: I1126 11:52:32.831314 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"950a31f4-607a-4c61-aa2c-606959f96b3b","Type":"ContainerStarted","Data":"bd57ada0131e08ac509305ffc51727f910fb872dfe075357fb7282d1b1c525bf"} Nov 26 11:52:32 crc kubenswrapper[4622]: I1126 11:52:32.831888 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e6cb4914-4902-4e5a-8c29-34f4136736f0","Type":"ContainerStarted","Data":"237be3b08090e41252772220f2e5ebafca09e0f9aa070a558b9e0414b51b53ca"} Nov 26 11:52:32 crc kubenswrapper[4622]: I1126 11:52:32.834239 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5cad4cf3-7cf8-43eb-b48d-954bb0d60d03","Type":"ContainerStarted","Data":"04268a1aa839536e0c660360e889cf918b15cec87d8b0133a6c9ef0707ea36ef"} Nov 26 11:52:32 crc kubenswrapper[4622]: I1126 11:52:32.836767 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"9f0ed356-b135-473d-ad4c-8e21c287af13","Type":"ContainerStarted","Data":"d1cdd109c5ccbceae0c3789a433747ee171bccf0c81b263b2fa7e2a962b38848"} Nov 26 11:52:32 crc kubenswrapper[4622]: I1126 11:52:32.836838 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"9f0ed356-b135-473d-ad4c-8e21c287af13","Type":"ContainerStarted","Data":"41d3f3497da47daff6d8d900243814e155286b7b1ba6df552a6f789ee5581943"} Nov 26 11:52:32 crc kubenswrapper[4622]: I1126 11:52:32.858025 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=2.416507995 podStartE2EDuration="3.858006115s" podCreationTimestamp="2025-11-26 11:52:29 +0000 UTC" firstStartedPulling="2025-11-26 11:52:30.101166316 +0000 UTC m=+2509.692377837" lastFinishedPulling="2025-11-26 11:52:31.542664435 +0000 UTC m=+2511.133875957" observedRunningTime="2025-11-26 11:52:32.856610884 +0000 UTC m=+2512.447822407" watchObservedRunningTime="2025-11-26 11:52:32.858006115 +0000 UTC m=+2512.449217627" Nov 26 11:52:32 crc kubenswrapper[4622]: I1126 
11:52:32.889228 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=2.5004041299999997 podStartE2EDuration="3.889202739s" podCreationTimestamp="2025-11-26 11:52:29 +0000 UTC" firstStartedPulling="2025-11-26 11:52:30.638743103 +0000 UTC m=+2510.229954616" lastFinishedPulling="2025-11-26 11:52:32.027541703 +0000 UTC m=+2511.618753225" observedRunningTime="2025-11-26 11:52:32.886255912 +0000 UTC m=+2512.477467434" watchObservedRunningTime="2025-11-26 11:52:32.889202739 +0000 UTC m=+2512.480414261" Nov 26 11:52:32 crc kubenswrapper[4622]: I1126 11:52:32.912419 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.912399539 podStartE2EDuration="3.912399539s" podCreationTimestamp="2025-11-26 11:52:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:52:32.906979388 +0000 UTC m=+2512.498190911" watchObservedRunningTime="2025-11-26 11:52:32.912399539 +0000 UTC m=+2512.503611061" Nov 26 11:52:32 crc kubenswrapper[4622]: I1126 11:52:32.935663 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.935644198 podStartE2EDuration="3.935644198s" podCreationTimestamp="2025-11-26 11:52:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:52:32.928889482 +0000 UTC m=+2512.520101004" watchObservedRunningTime="2025-11-26 11:52:32.935644198 +0000 UTC m=+2512.526855720" Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.248269 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-9ad5-account-create-update-sg5r5" Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.253362 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-7kxg5" Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.280585 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8tl5\" (UniqueName: \"kubernetes.io/projected/a36f9d11-1586-4876-afeb-a197d5479f7c-kube-api-access-x8tl5\") pod \"a36f9d11-1586-4876-afeb-a197d5479f7c\" (UID: \"a36f9d11-1586-4876-afeb-a197d5479f7c\") " Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.280721 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2b956c55-fa7f-474e-a13a-a410f68ce795-operator-scripts\") pod \"2b956c55-fa7f-474e-a13a-a410f68ce795\" (UID: \"2b956c55-fa7f-474e-a13a-a410f68ce795\") " Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.280850 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnktm\" (UniqueName: \"kubernetes.io/projected/2b956c55-fa7f-474e-a13a-a410f68ce795-kube-api-access-lnktm\") pod \"2b956c55-fa7f-474e-a13a-a410f68ce795\" (UID: \"2b956c55-fa7f-474e-a13a-a410f68ce795\") " Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.280889 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a36f9d11-1586-4876-afeb-a197d5479f7c-operator-scripts\") pod \"a36f9d11-1586-4876-afeb-a197d5479f7c\" (UID: \"a36f9d11-1586-4876-afeb-a197d5479f7c\") " Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.282262 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a36f9d11-1586-4876-afeb-a197d5479f7c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a36f9d11-1586-4876-afeb-a197d5479f7c" (UID: "a36f9d11-1586-4876-afeb-a197d5479f7c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.283263 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b956c55-fa7f-474e-a13a-a410f68ce795-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2b956c55-fa7f-474e-a13a-a410f68ce795" (UID: "2b956c55-fa7f-474e-a13a-a410f68ce795"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.295851 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b956c55-fa7f-474e-a13a-a410f68ce795-kube-api-access-lnktm" (OuterVolumeSpecName: "kube-api-access-lnktm") pod "2b956c55-fa7f-474e-a13a-a410f68ce795" (UID: "2b956c55-fa7f-474e-a13a-a410f68ce795"). InnerVolumeSpecName "kube-api-access-lnktm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.296325 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a36f9d11-1586-4876-afeb-a197d5479f7c-kube-api-access-x8tl5" (OuterVolumeSpecName: "kube-api-access-x8tl5") pod "a36f9d11-1586-4876-afeb-a197d5479f7c" (UID: "a36f9d11-1586-4876-afeb-a197d5479f7c"). InnerVolumeSpecName "kube-api-access-x8tl5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.383726 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2b956c55-fa7f-474e-a13a-a410f68ce795-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.384087 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnktm\" (UniqueName: \"kubernetes.io/projected/2b956c55-fa7f-474e-a13a-a410f68ce795-kube-api-access-lnktm\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.384156 4622 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a36f9d11-1586-4876-afeb-a197d5479f7c-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.384212 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8tl5\" (UniqueName: \"kubernetes.io/projected/a36f9d11-1586-4876-afeb-a197d5479f7c-kube-api-access-x8tl5\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.855795 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-9ad5-account-create-update-sg5r5" event={"ID":"2b956c55-fa7f-474e-a13a-a410f68ce795","Type":"ContainerDied","Data":"09ed9ca93bafdb9e38761fbdfac2eb7ff6ffd27467b08e2f547fade1fd1d4bd1"} Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.856076 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09ed9ca93bafdb9e38761fbdfac2eb7ff6ffd27467b08e2f547fade1fd1d4bd1" Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.855841 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-9ad5-account-create-update-sg5r5" Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.857357 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-7kxg5" event={"ID":"a36f9d11-1586-4876-afeb-a197d5479f7c","Type":"ContainerDied","Data":"5be343009bda5eef28c9fe6b80c7c61c3ddd2b4bfc9cb0c82a3db5566cc72116"} Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.857417 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5be343009bda5eef28c9fe6b80c7c61c3ddd2b4bfc9cb0c82a3db5566cc72116" Nov 26 11:52:33 crc kubenswrapper[4622]: I1126 11:52:33.857445 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-7kxg5" Nov 26 11:52:34 crc kubenswrapper[4622]: I1126 11:52:34.496807 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:34 crc kubenswrapper[4622]: I1126 11:52:34.527284 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.230408 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-mq94p"] Nov 26 11:52:35 crc kubenswrapper[4622]: E1126 11:52:35.231140 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b956c55-fa7f-474e-a13a-a410f68ce795" containerName="mariadb-account-create-update" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.231156 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b956c55-fa7f-474e-a13a-a410f68ce795" containerName="mariadb-account-create-update" Nov 26 11:52:35 crc kubenswrapper[4622]: E1126 11:52:35.231174 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a36f9d11-1586-4876-afeb-a197d5479f7c" containerName="mariadb-database-create" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.231180 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="a36f9d11-1586-4876-afeb-a197d5479f7c" containerName="mariadb-database-create" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.231348 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b956c55-fa7f-474e-a13a-a410f68ce795" containerName="mariadb-account-create-update" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.231377 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="a36f9d11-1586-4876-afeb-a197d5479f7c" containerName="mariadb-database-create" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.232036 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.234964 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-csl4l" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.235083 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.254813 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-mq94p"] Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.335969 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-config-data\") pod \"manila-db-sync-mq94p\" (UID: \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.336018 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4tlf\" (UniqueName: \"kubernetes.io/projected/ae6e0966-f7e8-4460-97e1-2c9effc3f080-kube-api-access-k4tlf\") pod \"manila-db-sync-mq94p\" (UID: \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.336248 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-combined-ca-bundle\") pod \"manila-db-sync-mq94p\" (UID: \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.336472 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-job-config-data\") pod \"manila-db-sync-mq94p\" (UID: \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.439869 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-job-config-data\") pod \"manila-db-sync-mq94p\" (UID: \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.440013 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-config-data\") pod \"manila-db-sync-mq94p\" (UID: \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.440053 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4tlf\" (UniqueName: \"kubernetes.io/projected/ae6e0966-f7e8-4460-97e1-2c9effc3f080-kube-api-access-k4tlf\") pod \"manila-db-sync-mq94p\" (UID: \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.440285 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-combined-ca-bundle\") pod \"manila-db-sync-mq94p\" (UID: 
\"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.454836 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-combined-ca-bundle\") pod \"manila-db-sync-mq94p\" (UID: \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.455213 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-job-config-data\") pod \"manila-db-sync-mq94p\" (UID: \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.457689 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-config-data\") pod \"manila-db-sync-mq94p\" (UID: \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.459672 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4tlf\" (UniqueName: \"kubernetes.io/projected/ae6e0966-f7e8-4460-97e1-2c9effc3f080-kube-api-access-k4tlf\") pod \"manila-db-sync-mq94p\" (UID: \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:35 crc kubenswrapper[4622]: I1126 11:52:35.551779 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:36 crc kubenswrapper[4622]: I1126 11:52:36.052387 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-mq94p"] Nov 26 11:52:36 crc kubenswrapper[4622]: I1126 11:52:36.900759 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-mq94p" event={"ID":"ae6e0966-f7e8-4460-97e1-2c9effc3f080","Type":"ContainerStarted","Data":"02963a77d2cf5c954ee5c8024cd3c4257def319698c9070e4ea1147f7bec308c"} Nov 26 11:52:39 crc kubenswrapper[4622]: I1126 11:52:39.686813 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Nov 26 11:52:39 crc kubenswrapper[4622]: I1126 11:52:39.697048 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Nov 26 11:52:40 crc kubenswrapper[4622]: I1126 11:52:40.366092 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 26 11:52:40 crc kubenswrapper[4622]: I1126 11:52:40.366168 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 26 11:52:40 crc kubenswrapper[4622]: I1126 11:52:40.406423 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 26 11:52:40 crc kubenswrapper[4622]: I1126 11:52:40.416002 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 26 11:52:40 crc kubenswrapper[4622]: I1126 11:52:40.420904 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 26 11:52:40 crc kubenswrapper[4622]: I1126 11:52:40.420971 4622 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 26 11:52:40 crc kubenswrapper[4622]: I1126 11:52:40.456877 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 26 11:52:40 crc kubenswrapper[4622]: I1126 11:52:40.465167 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 26 11:52:40 crc kubenswrapper[4622]: I1126 11:52:40.945205 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-mq94p" event={"ID":"ae6e0966-f7e8-4460-97e1-2c9effc3f080","Type":"ContainerStarted","Data":"bff967207a4747f826e61edde3a078c16e5ab51886aa1670e85f3d83552bb10e"} Nov 26 11:52:40 crc kubenswrapper[4622]: I1126 11:52:40.946180 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 26 11:52:40 crc kubenswrapper[4622]: I1126 11:52:40.946392 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 26 11:52:40 crc kubenswrapper[4622]: I1126 11:52:40.946411 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 26 11:52:40 crc kubenswrapper[4622]: I1126 11:52:40.946425 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 26 11:52:40 crc kubenswrapper[4622]: I1126 11:52:40.989918 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-mq94p" podStartSLOduration=1.576059816 podStartE2EDuration="5.98989291s" podCreationTimestamp="2025-11-26 11:52:35 +0000 UTC" firstStartedPulling="2025-11-26 11:52:36.061316653 +0000 UTC m=+2515.652528175" lastFinishedPulling="2025-11-26 11:52:40.475149746 +0000 UTC m=+2520.066361269" observedRunningTime="2025-11-26 11:52:40.98740039 +0000 UTC m=+2520.578611912" watchObservedRunningTime="2025-11-26 11:52:40.98989291 +0000 UTC m=+2520.581104431" Nov 26 11:52:42 crc kubenswrapper[4622]: I1126 11:52:42.769415 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 26 11:52:42 crc kubenswrapper[4622]: I1126 11:52:42.772141 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 26 11:52:42 crc kubenswrapper[4622]: I1126 11:52:42.793128 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 26 11:52:42 crc kubenswrapper[4622]: I1126 11:52:42.794486 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 26 11:52:44 crc kubenswrapper[4622]: I1126 11:52:44.994657 4622 generic.go:334] "Generic (PLEG): container finished" podID="ae6e0966-f7e8-4460-97e1-2c9effc3f080" containerID="bff967207a4747f826e61edde3a078c16e5ab51886aa1670e85f3d83552bb10e" exitCode=0 Nov 26 11:52:44 crc kubenswrapper[4622]: I1126 11:52:44.996042 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-mq94p" event={"ID":"ae6e0966-f7e8-4460-97e1-2c9effc3f080","Type":"ContainerDied","Data":"bff967207a4747f826e61edde3a078c16e5ab51886aa1670e85f3d83552bb10e"} Nov 26 11:52:46 crc kubenswrapper[4622]: I1126 11:52:46.358813 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:46 crc kubenswrapper[4622]: I1126 11:52:46.538878 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-config-data\") pod \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\" (UID: \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " Nov 26 11:52:46 crc kubenswrapper[4622]: I1126 11:52:46.538976 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4tlf\" (UniqueName: \"kubernetes.io/projected/ae6e0966-f7e8-4460-97e1-2c9effc3f080-kube-api-access-k4tlf\") pod \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\" (UID: \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " Nov 26 11:52:46 crc kubenswrapper[4622]: I1126 11:52:46.539030 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-job-config-data\") pod \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\" (UID: \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " Nov 26 11:52:46 crc kubenswrapper[4622]: I1126 11:52:46.539448 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-combined-ca-bundle\") pod \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\" (UID: \"ae6e0966-f7e8-4460-97e1-2c9effc3f080\") " Nov 26 11:52:46 crc kubenswrapper[4622]: I1126 11:52:46.545053 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae6e0966-f7e8-4460-97e1-2c9effc3f080-kube-api-access-k4tlf" (OuterVolumeSpecName: "kube-api-access-k4tlf") pod "ae6e0966-f7e8-4460-97e1-2c9effc3f080" (UID: "ae6e0966-f7e8-4460-97e1-2c9effc3f080"). InnerVolumeSpecName "kube-api-access-k4tlf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:52:46 crc kubenswrapper[4622]: I1126 11:52:46.552031 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-config-data" (OuterVolumeSpecName: "config-data") pod "ae6e0966-f7e8-4460-97e1-2c9effc3f080" (UID: "ae6e0966-f7e8-4460-97e1-2c9effc3f080"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:46 crc kubenswrapper[4622]: I1126 11:52:46.555160 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "ae6e0966-f7e8-4460-97e1-2c9effc3f080" (UID: "ae6e0966-f7e8-4460-97e1-2c9effc3f080"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:46 crc kubenswrapper[4622]: I1126 11:52:46.576763 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae6e0966-f7e8-4460-97e1-2c9effc3f080" (UID: "ae6e0966-f7e8-4460-97e1-2c9effc3f080"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:46 crc kubenswrapper[4622]: I1126 11:52:46.642622 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:46 crc kubenswrapper[4622]: I1126 11:52:46.642657 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:46 crc kubenswrapper[4622]: I1126 11:52:46.642673 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4tlf\" (UniqueName: \"kubernetes.io/projected/ae6e0966-f7e8-4460-97e1-2c9effc3f080-kube-api-access-k4tlf\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:46 crc kubenswrapper[4622]: I1126 11:52:46.642683 4622 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/ae6e0966-f7e8-4460-97e1-2c9effc3f080-job-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.020097 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-mq94p" event={"ID":"ae6e0966-f7e8-4460-97e1-2c9effc3f080","Type":"ContainerDied","Data":"02963a77d2cf5c954ee5c8024cd3c4257def319698c9070e4ea1147f7bec308c"} Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.020563 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="02963a77d2cf5c954ee5c8024cd3c4257def319698c9070e4ea1147f7bec308c" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.020595 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-mq94p" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.405298 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Nov 26 11:52:47 crc kubenswrapper[4622]: E1126 11:52:47.406149 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae6e0966-f7e8-4460-97e1-2c9effc3f080" containerName="manila-db-sync" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.406166 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae6e0966-f7e8-4460-97e1-2c9effc3f080" containerName="manila-db-sync" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.406438 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae6e0966-f7e8-4460-97e1-2c9effc3f080" containerName="manila-db-sync" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.407437 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.409861 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.410111 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.410258 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-csl4l" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.411022 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.415597 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.421487 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.426035 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.429428 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.457935 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.568307 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-config-data\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.568377 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-config-data\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.568403 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/caa72da5-8c2e-4830-b830-c4d894f9bb5d-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.568432 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1cb716dd-ba25-414a-b7f1-57cff3df83ea-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.568471 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkc44\" (UniqueName: \"kubernetes.io/projected/caa72da5-8c2e-4830-b830-c4d894f9bb5d-kube-api-access-wkc44\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0" Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 
11:52:47.568487 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-scripts\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.568529 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/caa72da5-8c2e-4830-b830-c4d894f9bb5d-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.568555 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpx58\" (UniqueName: \"kubernetes.io/projected/1cb716dd-ba25-414a-b7f1-57cff3df83ea-kube-api-access-wpx58\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.568577 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/caa72da5-8c2e-4830-b830-c4d894f9bb5d-ceph\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.568626 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-scripts\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.568657 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.568673 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.568698 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.568735 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.624541 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6885d49d55-25kwv"]
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.625956 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.649821 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6885d49d55-25kwv"]
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.670840 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.670904 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.670931 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7144d84c-d70a-4566-bb04-1b0cb8e058fb-ovsdbserver-sb\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.670963 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-config-data\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671006 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-config-data\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671027 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7144d84c-d70a-4566-bb04-1b0cb8e058fb-openstack-edpm-ipam\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671046 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/caa72da5-8c2e-4830-b830-c4d894f9bb5d-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671074 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1cb716dd-ba25-414a-b7f1-57cff3df83ea-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671105 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7144d84c-d70a-4566-bb04-1b0cb8e058fb-ovsdbserver-nb\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671127 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-scripts\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671144 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkc44\" (UniqueName: \"kubernetes.io/projected/caa72da5-8c2e-4830-b830-c4d894f9bb5d-kube-api-access-wkc44\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671168 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/caa72da5-8c2e-4830-b830-c4d894f9bb5d-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671192 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpx58\" (UniqueName: \"kubernetes.io/projected/1cb716dd-ba25-414a-b7f1-57cff3df83ea-kube-api-access-wpx58\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671215 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/caa72da5-8c2e-4830-b830-c4d894f9bb5d-ceph\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671265 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7144d84c-d70a-4566-bb04-1b0cb8e058fb-config\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671287 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-scripts\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671309 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7144d84c-d70a-4566-bb04-1b0cb8e058fb-dns-svc\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671327 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6vqw\" (UniqueName: \"kubernetes.io/projected/7144d84c-d70a-4566-bb04-1b0cb8e058fb-kube-api-access-t6vqw\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671351 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.671365 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.672373 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/caa72da5-8c2e-4830-b830-c4d894f9bb5d-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.673335 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1cb716dd-ba25-414a-b7f1-57cff3df83ea-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.673449 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/caa72da5-8c2e-4830-b830-c4d894f9bb5d-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.678795 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-config-data\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.680822 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-scripts\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.691195 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/caa72da5-8c2e-4830-b830-c4d894f9bb5d-ceph\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.693464 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-config-data\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.693960 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.695020 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.697525 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-scripts\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.698043 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpx58\" (UniqueName: \"kubernetes.io/projected/1cb716dd-ba25-414a-b7f1-57cff3df83ea-kube-api-access-wpx58\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.698302 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.708173 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.709890 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkc44\" (UniqueName: \"kubernetes.io/projected/caa72da5-8c2e-4830-b830-c4d894f9bb5d-kube-api-access-wkc44\") pod \"manila-share-share1-0\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.727160 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.745034 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.774167 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7144d84c-d70a-4566-bb04-1b0cb8e058fb-ovsdbserver-nb\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.774260 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7144d84c-d70a-4566-bb04-1b0cb8e058fb-config\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.774295 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7144d84c-d70a-4566-bb04-1b0cb8e058fb-dns-svc\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.774318 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6vqw\" (UniqueName: \"kubernetes.io/projected/7144d84c-d70a-4566-bb04-1b0cb8e058fb-kube-api-access-t6vqw\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.774366 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7144d84c-d70a-4566-bb04-1b0cb8e058fb-ovsdbserver-sb\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.774413 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7144d84c-d70a-4566-bb04-1b0cb8e058fb-openstack-edpm-ipam\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.775277 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7144d84c-d70a-4566-bb04-1b0cb8e058fb-openstack-edpm-ipam\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.776215 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7144d84c-d70a-4566-bb04-1b0cb8e058fb-ovsdbserver-nb\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.776697 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7144d84c-d70a-4566-bb04-1b0cb8e058fb-dns-svc\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.777056 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7144d84c-d70a-4566-bb04-1b0cb8e058fb-ovsdbserver-sb\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.777212 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7144d84c-d70a-4566-bb04-1b0cb8e058fb-config\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.808157 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6vqw\" (UniqueName: \"kubernetes.io/projected/7144d84c-d70a-4566-bb04-1b0cb8e058fb-kube-api-access-t6vqw\") pod \"dnsmasq-dns-6885d49d55-25kwv\" (UID: \"7144d84c-d70a-4566-bb04-1b0cb8e058fb\") " pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.810598 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"]
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.812106 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.816584 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.822827 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"]
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.878249 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-config-data-custom\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.878286 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91ed7c3e-d3c7-402b-9429-526a5555a8bf-logs\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.878365 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pjpf\" (UniqueName: \"kubernetes.io/projected/91ed7c3e-d3c7-402b-9429-526a5555a8bf-kube-api-access-9pjpf\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.878385 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-config-data\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.878437 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/91ed7c3e-d3c7-402b-9429-526a5555a8bf-etc-machine-id\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.878452 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-scripts\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.878482 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.976453 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.983637 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-config-data-custom\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.983682 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91ed7c3e-d3c7-402b-9429-526a5555a8bf-logs\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.983839 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pjpf\" (UniqueName: \"kubernetes.io/projected/91ed7c3e-d3c7-402b-9429-526a5555a8bf-kube-api-access-9pjpf\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.983881 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-config-data\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.984036 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/91ed7c3e-d3c7-402b-9429-526a5555a8bf-etc-machine-id\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.984065 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-scripts\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.984135 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.989013 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.991000 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-scripts\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.991341 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91ed7c3e-d3c7-402b-9429-526a5555a8bf-logs\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.991629 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-config-data-custom\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.995894 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-config-data\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:47 crc kubenswrapper[4622]: I1126 11:52:47.996294 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/91ed7c3e-d3c7-402b-9429-526a5555a8bf-etc-machine-id\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:48 crc kubenswrapper[4622]: I1126 11:52:48.006244 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pjpf\" (UniqueName: \"kubernetes.io/projected/91ed7c3e-d3c7-402b-9429-526a5555a8bf-kube-api-access-9pjpf\") pod \"manila-api-0\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") " pod="openstack/manila-api-0"
Nov 26 11:52:48 crc kubenswrapper[4622]: I1126 11:52:48.192115 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0"
Nov 26 11:52:48 crc kubenswrapper[4622]: I1126 11:52:48.380249 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"]
Nov 26 11:52:48 crc kubenswrapper[4622]: W1126 11:52:48.380311 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcaa72da5_8c2e_4830_b830_c4d894f9bb5d.slice/crio-69ae7f201c7f0cc3c49af4c94bca1a466022cfee30032b88586e7257caedc305 WatchSource:0}: Error finding container 69ae7f201c7f0cc3c49af4c94bca1a466022cfee30032b88586e7257caedc305: Status 404 returned error can't find the container with id 69ae7f201c7f0cc3c49af4c94bca1a466022cfee30032b88586e7257caedc305
Nov 26 11:52:48 crc kubenswrapper[4622]: I1126 11:52:48.424530 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"]
Nov 26 11:52:48 crc kubenswrapper[4622]: W1126 11:52:48.427044 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1cb716dd_ba25_414a_b7f1_57cff3df83ea.slice/crio-444ac08475b3356737da296c2c8a2a8d0851ba182bf23d7cf9abd669b8cd50e2 WatchSource:0}: Error finding container 444ac08475b3356737da296c2c8a2a8d0851ba182bf23d7cf9abd669b8cd50e2: Status 404 returned error can't find the container with id 444ac08475b3356737da296c2c8a2a8d0851ba182bf23d7cf9abd669b8cd50e2
Nov 26 11:52:48 crc kubenswrapper[4622]: I1126 11:52:48.507190 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6885d49d55-25kwv"]
Nov 26 11:52:48 crc kubenswrapper[4622]: W1126 11:52:48.510989 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7144d84c_d70a_4566_bb04_1b0cb8e058fb.slice/crio-a7679b72745a5158c52e5813de55ec16c5730c3cfccbe678e7e509f91def0b2f WatchSource:0}: Error finding container a7679b72745a5158c52e5813de55ec16c5730c3cfccbe678e7e509f91def0b2f: Status 404 returned error can't find the container with id a7679b72745a5158c52e5813de55ec16c5730c3cfccbe678e7e509f91def0b2f
Nov 26 11:52:48 crc kubenswrapper[4622]: I1126 11:52:48.719439 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"]
Nov 26 11:52:49 crc kubenswrapper[4622]: I1126 11:52:49.054416 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"91ed7c3e-d3c7-402b-9429-526a5555a8bf","Type":"ContainerStarted","Data":"4d4096807c0b2c2769916450e5ba9b9eed569d084d6a3277586981b2235c482f"}
Nov 26 11:52:49 crc kubenswrapper[4622]: I1126 11:52:49.058181 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"caa72da5-8c2e-4830-b830-c4d894f9bb5d","Type":"ContainerStarted","Data":"69ae7f201c7f0cc3c49af4c94bca1a466022cfee30032b88586e7257caedc305"}
Nov 26 11:52:49 crc kubenswrapper[4622]: I1126 11:52:49.060214 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"1cb716dd-ba25-414a-b7f1-57cff3df83ea","Type":"ContainerStarted","Data":"444ac08475b3356737da296c2c8a2a8d0851ba182bf23d7cf9abd669b8cd50e2"}
Nov 26 11:52:49 crc kubenswrapper[4622]: I1126 11:52:49.063972 4622 generic.go:334] "Generic (PLEG): container finished" podID="7144d84c-d70a-4566-bb04-1b0cb8e058fb" containerID="6a6a7ee991b5a6dad3ca05a92dbc9d11d45c1b026a77e125a813fc3521fb0f1a" exitCode=0
Nov 26 11:52:49 crc kubenswrapper[4622]: I1126 11:52:49.063999 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6885d49d55-25kwv" event={"ID":"7144d84c-d70a-4566-bb04-1b0cb8e058fb","Type":"ContainerDied","Data":"6a6a7ee991b5a6dad3ca05a92dbc9d11d45c1b026a77e125a813fc3521fb0f1a"}
Nov 26 11:52:49 crc kubenswrapper[4622]: I1126 11:52:49.064016 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6885d49d55-25kwv" event={"ID":"7144d84c-d70a-4566-bb04-1b0cb8e058fb","Type":"ContainerStarted","Data":"a7679b72745a5158c52e5813de55ec16c5730c3cfccbe678e7e509f91def0b2f"}
Nov 26 11:52:50 crc kubenswrapper[4622]: I1126 11:52:50.077256 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"91ed7c3e-d3c7-402b-9429-526a5555a8bf","Type":"ContainerStarted","Data":"2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db"}
Nov 26 11:52:50 crc kubenswrapper[4622]: I1126 11:52:50.077744 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"91ed7c3e-d3c7-402b-9429-526a5555a8bf","Type":"ContainerStarted","Data":"5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f"}
Nov 26 11:52:50 crc kubenswrapper[4622]: I1126 11:52:50.078889 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0"
Nov 26 11:52:50 crc kubenswrapper[4622]: I1126 11:52:50.081994 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"1cb716dd-ba25-414a-b7f1-57cff3df83ea","Type":"ContainerStarted","Data":"ea1d5b798538d8b0788948f766e3fed2d6d4b27a1a5f0dbe27e37fe59945b114"}
Nov 26 11:52:50 crc kubenswrapper[4622]: I1126 11:52:50.084204 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6885d49d55-25kwv" event={"ID":"7144d84c-d70a-4566-bb04-1b0cb8e058fb","Type":"ContainerStarted","Data":"287b7bf15cf0aeb1f336320eb0d45a01bfa6a8f28d8d6f8534612307cdd59b8f"}
Nov 26 11:52:50 crc kubenswrapper[4622]: I1126 11:52:50.084329 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6885d49d55-25kwv"
Nov 26 11:52:50 crc kubenswrapper[4622]: I1126 11:52:50.102649 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=3.102625442 podStartE2EDuration="3.102625442s" podCreationTimestamp="2025-11-26 11:52:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:52:50.092931284 +0000 UTC m=+2529.684142806" watchObservedRunningTime="2025-11-26 11:52:50.102625442 +0000 UTC m=+2529.693836965"
Nov 26 11:52:50 crc kubenswrapper[4622]: I1126 11:52:50.126551 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6885d49d55-25kwv" podStartSLOduration=3.126521561 podStartE2EDuration="3.126521561s" podCreationTimestamp="2025-11-26 11:52:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:52:50.1159938 +0000 UTC m=+2529.707205312" watchObservedRunningTime="2025-11-26 11:52:50.126521561 +0000 UTC m=+2529.717733083"
Nov 26 11:52:50 crc kubenswrapper[4622]: I1126 11:52:50.672181 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"]
Nov 26 11:52:51 crc kubenswrapper[4622]: I1126 11:52:51.099816 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"1cb716dd-ba25-414a-b7f1-57cff3df83ea","Type":"ContainerStarted","Data":"43fd4e3892921d571788b05875bd85f9d9689bc38a7a73f28d69d8cda7d580ec"}
Nov 26 11:52:51 crc kubenswrapper[4622]: I1126 11:52:51.123229 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=3.217783143 podStartE2EDuration="4.123213981s" podCreationTimestamp="2025-11-26 11:52:47 +0000 UTC" firstStartedPulling="2025-11-26 11:52:48.429080328 +0000 UTC m=+2528.020291851" lastFinishedPulling="2025-11-26 11:52:49.334511167 +0000 UTC m=+2528.925722689" observedRunningTime="2025-11-26 11:52:51.118515221 +0000 UTC m=+2530.709726743" watchObservedRunningTime="2025-11-26 11:52:51.123213981 +0000 UTC m=+2530.714425503"
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.112734 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="91ed7c3e-d3c7-402b-9429-526a5555a8bf" containerName="manila-api-log" containerID="cri-o://5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f" gracePeriod=30
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.112795 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="91ed7c3e-d3c7-402b-9429-526a5555a8bf" containerName="manila-api" containerID="cri-o://2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db" gracePeriod=30
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.783014 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0"
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.826555 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/91ed7c3e-d3c7-402b-9429-526a5555a8bf-etc-machine-id\") pod \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") "
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.826834 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/91ed7c3e-d3c7-402b-9429-526a5555a8bf-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "91ed7c3e-d3c7-402b-9429-526a5555a8bf" (UID: "91ed7c3e-d3c7-402b-9429-526a5555a8bf"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.827579 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91ed7c3e-d3c7-402b-9429-526a5555a8bf-logs\") pod \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") "
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.827723 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pjpf\" (UniqueName: \"kubernetes.io/projected/91ed7c3e-d3c7-402b-9429-526a5555a8bf-kube-api-access-9pjpf\") pod \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") "
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.827914 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-config-data\") pod \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") "
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.827958 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-config-data-custom\") pod \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") "
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.828156 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-scripts\") pod \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") "
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.828244 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91ed7c3e-d3c7-402b-9429-526a5555a8bf-logs" (OuterVolumeSpecName: "logs") pod "91ed7c3e-d3c7-402b-9429-526a5555a8bf" (UID: "91ed7c3e-d3c7-402b-9429-526a5555a8bf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.828273 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-combined-ca-bundle\") pod \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\" (UID: \"91ed7c3e-d3c7-402b-9429-526a5555a8bf\") "
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.830075 4622 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/91ed7c3e-d3c7-402b-9429-526a5555a8bf-etc-machine-id\") on node \"crc\" DevicePath \"\""
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.830099 4622 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91ed7c3e-d3c7-402b-9429-526a5555a8bf-logs\") on node \"crc\" DevicePath \"\""
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.836979 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-scripts" (OuterVolumeSpecName: "scripts") pod "91ed7c3e-d3c7-402b-9429-526a5555a8bf" (UID: "91ed7c3e-d3c7-402b-9429-526a5555a8bf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.838735 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91ed7c3e-d3c7-402b-9429-526a5555a8bf-kube-api-access-9pjpf" (OuterVolumeSpecName: "kube-api-access-9pjpf") pod "91ed7c3e-d3c7-402b-9429-526a5555a8bf" (UID: "91ed7c3e-d3c7-402b-9429-526a5555a8bf"). InnerVolumeSpecName "kube-api-access-9pjpf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.840890 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "91ed7c3e-d3c7-402b-9429-526a5555a8bf" (UID: "91ed7c3e-d3c7-402b-9429-526a5555a8bf"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.865579 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91ed7c3e-d3c7-402b-9429-526a5555a8bf" (UID: "91ed7c3e-d3c7-402b-9429-526a5555a8bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.890047 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-config-data" (OuterVolumeSpecName: "config-data") pod "91ed7c3e-d3c7-402b-9429-526a5555a8bf" (UID: "91ed7c3e-d3c7-402b-9429-526a5555a8bf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.932435 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pjpf\" (UniqueName: \"kubernetes.io/projected/91ed7c3e-d3c7-402b-9429-526a5555a8bf-kube-api-access-9pjpf\") on node \"crc\" DevicePath \"\""
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.932468 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-config-data\") on node \"crc\" DevicePath \"\""
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.932482 4622 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-config-data-custom\") on node \"crc\" DevicePath \"\""
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.932492 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-scripts\") on node \"crc\" DevicePath \"\""
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.932516 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91ed7c3e-d3c7-402b-9429-526a5555a8bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.972322 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.972829 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="sg-core" containerID="cri-o://c2045a879e74d7473cea6ffa284e1be102459aa2bcb25c1d3f2e9a6d83953d17" gracePeriod=30
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.972976 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="ceilometer-notification-agent" containerID="cri-o://e916d09526558825d222c440c16e1937aeb6b9cabd4f0a7ed40d2723f6f34312" gracePeriod=30
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.973024 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="proxy-httpd" containerID="cri-o://47ba66dbbf222db95772293c6c9a1c2b28887cd56d80ffe89ab430ee56a4f1bf" gracePeriod=30
Nov 26 11:52:52 crc kubenswrapper[4622]: I1126 11:52:52.973224 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="ceilometer-central-agent" containerID="cri-o://95103bd14ea594d50a21cad687fec99c558b66259b631f5f5b0adb727c217fad" gracePeriod=30
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.127389 4622 generic.go:334] "Generic (PLEG): container finished" podID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerID="47ba66dbbf222db95772293c6c9a1c2b28887cd56d80ffe89ab430ee56a4f1bf" exitCode=0
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.127720 4622 generic.go:334] "Generic (PLEG): container finished" podID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerID="c2045a879e74d7473cea6ffa284e1be102459aa2bcb25c1d3f2e9a6d83953d17" exitCode=2
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.127530 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45efa27c-c104-4e86-8283-eac845b5fdd1","Type":"ContainerDied","Data":"47ba66dbbf222db95772293c6c9a1c2b28887cd56d80ffe89ab430ee56a4f1bf"}
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.127764 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45efa27c-c104-4e86-8283-eac845b5fdd1","Type":"ContainerDied","Data":"c2045a879e74d7473cea6ffa284e1be102459aa2bcb25c1d3f2e9a6d83953d17"}
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.130423 4622 generic.go:334] "Generic (PLEG): container finished" podID="91ed7c3e-d3c7-402b-9429-526a5555a8bf" containerID="2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db" exitCode=0
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.130445 4622 generic.go:334] "Generic (PLEG): container finished" podID="91ed7c3e-d3c7-402b-9429-526a5555a8bf" containerID="5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f" exitCode=143
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.130473 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"91ed7c3e-d3c7-402b-9429-526a5555a8bf","Type":"ContainerDied","Data":"2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db"}
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.130527 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.130568 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"91ed7c3e-d3c7-402b-9429-526a5555a8bf","Type":"ContainerDied","Data":"5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f"}
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.130583 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"91ed7c3e-d3c7-402b-9429-526a5555a8bf","Type":"ContainerDied","Data":"4d4096807c0b2c2769916450e5ba9b9eed569d084d6a3277586981b2235c482f"}
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.130625 4622 scope.go:117] "RemoveContainer" containerID="2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.167322 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"]
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.168048 4622 scope.go:117] "RemoveContainer" containerID="5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.175900 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-api-0"]
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.193929 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"]
Nov 26 11:52:53 crc kubenswrapper[4622]: E1126 11:52:53.194447 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91ed7c3e-d3c7-402b-9429-526a5555a8bf" containerName="manila-api-log"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.194470 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="91ed7c3e-d3c7-402b-9429-526a5555a8bf" containerName="manila-api-log"
Nov 26 11:52:53 crc kubenswrapper[4622]: E1126 11:52:53.194492 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91ed7c3e-d3c7-402b-9429-526a5555a8bf" containerName="manila-api"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.194514 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="91ed7c3e-d3c7-402b-9429-526a5555a8bf" containerName="manila-api"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.194688 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="91ed7c3e-d3c7-402b-9429-526a5555a8bf" containerName="manila-api-log"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.194709 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="91ed7c3e-d3c7-402b-9429-526a5555a8bf" containerName="manila-api"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.195793 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.204807 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"]
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.210552 4622 scope.go:117] "RemoveContainer" containerID="2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.212647 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-public-svc"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.212796 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-internal-svc"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.212887 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data"
Nov 26 11:52:53 crc kubenswrapper[4622]: E1126 11:52:53.222781 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db\": container with ID starting with 2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db not found: ID does not exist" containerID="2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.222832 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db"} err="failed to get container status \"2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db\": rpc error: code = NotFound desc = could not find container \"2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db\": container with ID starting with 2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db not found: ID does not exist"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.222858 4622 scope.go:117] "RemoveContainer" containerID="5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f"
Nov 26 11:52:53 crc kubenswrapper[4622]: E1126 11:52:53.223857 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f\": container with ID starting with 5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f not found: ID does not exist" containerID="5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.223902 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f"} err="failed to get container status \"5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f\": rpc error: code = NotFound desc = could not find container \"5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f\": container with ID starting with 5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f not found: ID does not exist"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.223934 4622 scope.go:117] "RemoveContainer" containerID="2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.229294 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db"} err="failed to get container status \"2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db\": rpc error: code = NotFound desc = could not find container \"2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db\": container with ID starting with 2ced56c26bc2391b36e899c4919948085e05b091f6c5225ef0165e143c8137db not found: ID does not exist"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.229339 4622 scope.go:117] "RemoveContainer" containerID="5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.235611 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f"} err="failed to get container status \"5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f\": rpc error: code = NotFound desc = could not find container \"5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f\": container with ID starting with 5e1900b64f1a6227462bc7e7f211a7fb1cf4deac358572511620aafb0a9fe34f not found: ID does not exist"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.239928 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/be89b876-f22c-4a79-8dbd-d543a4165cf3-etc-machine-id\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.240008 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-public-tls-certs\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.240076 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-internal-tls-certs\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.240252 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be89b876-f22c-4a79-8dbd-d543a4165cf3-logs\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.240394 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-config-data\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.240418 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-config-data-custom\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.240493 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-scripts\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.240566 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.240588 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hq22h\" (UniqueName: \"kubernetes.io/projected/be89b876-f22c-4a79-8dbd-d543a4165cf3-kube-api-access-hq22h\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.342853 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/be89b876-f22c-4a79-8dbd-d543a4165cf3-etc-machine-id\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.342901 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-public-tls-certs\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.342944 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-internal-tls-certs\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.343000 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be89b876-f22c-4a79-8dbd-d543a4165cf3-logs\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.343061 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-config-data\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.343076 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-config-data-custom\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.343093 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-scripts\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.343110 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.343127 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hq22h\" (UniqueName: \"kubernetes.io/projected/be89b876-f22c-4a79-8dbd-d543a4165cf3-kube-api-access-hq22h\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.343449 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/be89b876-f22c-4a79-8dbd-d543a4165cf3-etc-machine-id\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.344437 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be89b876-f22c-4a79-8dbd-d543a4165cf3-logs\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.347920 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-public-tls-certs\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.348266 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.348574 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-config-data\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.348650 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-internal-tls-certs\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.349428 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-config-data-custom\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.350314 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be89b876-f22c-4a79-8dbd-d543a4165cf3-scripts\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.357355 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hq22h\" (UniqueName: \"kubernetes.io/projected/be89b876-f22c-4a79-8dbd-d543a4165cf3-kube-api-access-hq22h\") pod \"manila-api-0\" (UID: \"be89b876-f22c-4a79-8dbd-d543a4165cf3\") " pod="openstack/manila-api-0"
Nov 26 11:52:53 crc kubenswrapper[4622]: I1126 11:52:53.513863 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0"
Nov 26 11:52:54 crc kubenswrapper[4622]: I1126 11:52:54.155404 4622 generic.go:334] "Generic (PLEG): container finished" podID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerID="95103bd14ea594d50a21cad687fec99c558b66259b631f5f5b0adb727c217fad" exitCode=0
Nov 26 11:52:54 crc kubenswrapper[4622]: I1126 11:52:54.155491 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45efa27c-c104-4e86-8283-eac845b5fdd1","Type":"ContainerDied","Data":"95103bd14ea594d50a21cad687fec99c558b66259b631f5f5b0adb727c217fad"}
Nov 26 11:52:54 crc kubenswrapper[4622]: I1126 11:52:54.728157 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91ed7c3e-d3c7-402b-9429-526a5555a8bf" path="/var/lib/kubelet/pods/91ed7c3e-d3c7-402b-9429-526a5555a8bf/volumes"
Nov 26 11:52:56 crc kubenswrapper[4622]: I1126 11:52:56.427968 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"]
Nov 26 11:52:57 crc kubenswrapper[4622]: I1126 11:52:57.190854 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"caa72da5-8c2e-4830-b830-c4d894f9bb5d","Type":"ContainerStarted","Data":"5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4"}
Nov 26 11:52:57 crc kubenswrapper[4622]: I1126 11:52:57.191201 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"caa72da5-8c2e-4830-b830-c4d894f9bb5d","Type":"ContainerStarted","Data":"2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7"}
Nov 26 11:52:57 crc kubenswrapper[4622]: I1126 11:52:57.193452 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"be89b876-f22c-4a79-8dbd-d543a4165cf3","Type":"ContainerStarted","Data":"bc2e33bee51780e41915bf0362175d1e49079f8868e0556cda82b471e188a0f3"}
Nov 26 11:52:57 crc kubenswrapper[4622]: I1126 11:52:57.193603 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"be89b876-f22c-4a79-8dbd-d543a4165cf3","Type":"ContainerStarted","Data":"5e3b779d8d7fa64223674eb2f250a60a249f943c3ae746d44f51308a7e2f5468"}
Nov 26 11:52:57 crc kubenswrapper[4622]: I1126 11:52:57.214894 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=2.614137276 podStartE2EDuration="10.21487864s" podCreationTimestamp="2025-11-26 11:52:47 +0000 UTC" firstStartedPulling="2025-11-26 11:52:48.383884899
+0000 UTC m=+2527.975096421" lastFinishedPulling="2025-11-26 11:52:55.984626273 +0000 UTC m=+2535.575837785" observedRunningTime="2025-11-26 11:52:57.212301591 +0000 UTC m=+2536.803513113" watchObservedRunningTime="2025-11-26 11:52:57.21487864 +0000 UTC m=+2536.806090162" Nov 26 11:52:57 crc kubenswrapper[4622]: I1126 11:52:57.727912 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Nov 26 11:52:57 crc kubenswrapper[4622]: I1126 11:52:57.745204 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Nov 26 11:52:57 crc kubenswrapper[4622]: I1126 11:52:57.979320 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6885d49d55-25kwv" Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.085552 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6dc44c56c-g4nc4"] Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.085843 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" podUID="76cda7d7-b0d1-4d08-93ae-70f43efd4438" containerName="dnsmasq-dns" containerID="cri-o://ff6baf2ba317375cc6d57f59e05f1f26d10ce46de3298927d6a501040825bdc9" gracePeriod=10 Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.209743 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"be89b876-f22c-4a79-8dbd-d543a4165cf3","Type":"ContainerStarted","Data":"10f60d2d00059d15a3829faa1e2aac90e02d1f23c21bf4ddb754a5d800c09b4a"} Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.230906 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=5.230894695 podStartE2EDuration="5.230894695s" podCreationTimestamp="2025-11-26 11:52:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:52:58.226992749 +0000 UTC m=+2537.818204270" watchObservedRunningTime="2025-11-26 11:52:58.230894695 +0000 UTC m=+2537.822106218" Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.512737 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.684824 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-ovsdbserver-nb\") pod \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.685035 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-ovsdbserver-sb\") pod \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.685063 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-config\") pod \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.685150 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-dns-svc\") pod \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.685175 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-openstack-edpm-ipam\") pod \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.685218 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99wdh\" (UniqueName: \"kubernetes.io/projected/76cda7d7-b0d1-4d08-93ae-70f43efd4438-kube-api-access-99wdh\") pod \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\" (UID: \"76cda7d7-b0d1-4d08-93ae-70f43efd4438\") " Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.690799 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76cda7d7-b0d1-4d08-93ae-70f43efd4438-kube-api-access-99wdh" (OuterVolumeSpecName: "kube-api-access-99wdh") pod "76cda7d7-b0d1-4d08-93ae-70f43efd4438" (UID: "76cda7d7-b0d1-4d08-93ae-70f43efd4438"). InnerVolumeSpecName "kube-api-access-99wdh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.733948 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-config" (OuterVolumeSpecName: "config") pod "76cda7d7-b0d1-4d08-93ae-70f43efd4438" (UID: "76cda7d7-b0d1-4d08-93ae-70f43efd4438"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.736440 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "76cda7d7-b0d1-4d08-93ae-70f43efd4438" (UID: "76cda7d7-b0d1-4d08-93ae-70f43efd4438"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.747242 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "76cda7d7-b0d1-4d08-93ae-70f43efd4438" (UID: "76cda7d7-b0d1-4d08-93ae-70f43efd4438"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.747688 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "76cda7d7-b0d1-4d08-93ae-70f43efd4438" (UID: "76cda7d7-b0d1-4d08-93ae-70f43efd4438"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.752236 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "76cda7d7-b0d1-4d08-93ae-70f43efd4438" (UID: "76cda7d7-b0d1-4d08-93ae-70f43efd4438"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.788439 4622 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.788478 4622 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.788491 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99wdh\" (UniqueName: \"kubernetes.io/projected/76cda7d7-b0d1-4d08-93ae-70f43efd4438-kube-api-access-99wdh\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.788516 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.788526 4622 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:58 crc kubenswrapper[4622]: I1126 11:52:58.788533 4622 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76cda7d7-b0d1-4d08-93ae-70f43efd4438-config\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.112391 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.196368 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-ceilometer-tls-certs\") pod \"45efa27c-c104-4e86-8283-eac845b5fdd1\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.196744 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sg94r\" (UniqueName: \"kubernetes.io/projected/45efa27c-c104-4e86-8283-eac845b5fdd1-kube-api-access-sg94r\") pod \"45efa27c-c104-4e86-8283-eac845b5fdd1\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.196811 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45efa27c-c104-4e86-8283-eac845b5fdd1-log-httpd\") pod \"45efa27c-c104-4e86-8283-eac845b5fdd1\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.196837 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-scripts\") pod \"45efa27c-c104-4e86-8283-eac845b5fdd1\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.196884 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-sg-core-conf-yaml\") pod \"45efa27c-c104-4e86-8283-eac845b5fdd1\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.196901 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-combined-ca-bundle\") pod \"45efa27c-c104-4e86-8283-eac845b5fdd1\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.196919 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-config-data\") pod \"45efa27c-c104-4e86-8283-eac845b5fdd1\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.196971 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45efa27c-c104-4e86-8283-eac845b5fdd1-run-httpd\") pod \"45efa27c-c104-4e86-8283-eac845b5fdd1\" (UID: \"45efa27c-c104-4e86-8283-eac845b5fdd1\") " Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.197545 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45efa27c-c104-4e86-8283-eac845b5fdd1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "45efa27c-c104-4e86-8283-eac845b5fdd1" (UID: "45efa27c-c104-4e86-8283-eac845b5fdd1"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.197853 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45efa27c-c104-4e86-8283-eac845b5fdd1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "45efa27c-c104-4e86-8283-eac845b5fdd1" (UID: "45efa27c-c104-4e86-8283-eac845b5fdd1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.200567 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-scripts" (OuterVolumeSpecName: "scripts") pod "45efa27c-c104-4e86-8283-eac845b5fdd1" (UID: "45efa27c-c104-4e86-8283-eac845b5fdd1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.202653 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45efa27c-c104-4e86-8283-eac845b5fdd1-kube-api-access-sg94r" (OuterVolumeSpecName: "kube-api-access-sg94r") pod "45efa27c-c104-4e86-8283-eac845b5fdd1" (UID: "45efa27c-c104-4e86-8283-eac845b5fdd1"). InnerVolumeSpecName "kube-api-access-sg94r". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.238165 4622 generic.go:334] "Generic (PLEG): container finished" podID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerID="e916d09526558825d222c440c16e1937aeb6b9cabd4f0a7ed40d2723f6f34312" exitCode=0 Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.238238 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45efa27c-c104-4e86-8283-eac845b5fdd1","Type":"ContainerDied","Data":"e916d09526558825d222c440c16e1937aeb6b9cabd4f0a7ed40d2723f6f34312"} Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.238269 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45efa27c-c104-4e86-8283-eac845b5fdd1","Type":"ContainerDied","Data":"a8f64ecb8c06ce8ae174938056fa54efbc0dc8b44e6a191df096a0bfa9ccfdca"} Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.238287 4622 scope.go:117] "RemoveContainer" containerID="47ba66dbbf222db95772293c6c9a1c2b28887cd56d80ffe89ab430ee56a4f1bf" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.238433 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.242967 4622 generic.go:334] "Generic (PLEG): container finished" podID="76cda7d7-b0d1-4d08-93ae-70f43efd4438" containerID="ff6baf2ba317375cc6d57f59e05f1f26d10ce46de3298927d6a501040825bdc9" exitCode=0 Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.243739 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.244077 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" event={"ID":"76cda7d7-b0d1-4d08-93ae-70f43efd4438","Type":"ContainerDied","Data":"ff6baf2ba317375cc6d57f59e05f1f26d10ce46de3298927d6a501040825bdc9"} Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.244167 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.244190 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dc44c56c-g4nc4" event={"ID":"76cda7d7-b0d1-4d08-93ae-70f43efd4438","Type":"ContainerDied","Data":"7d84d8113976129c6b5ff8cef6c0b210431e03d6f63aaf2c0fb6fdd2516a30b5"} Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.252359 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "45efa27c-c104-4e86-8283-eac845b5fdd1" (UID: "45efa27c-c104-4e86-8283-eac845b5fdd1"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.254356 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "45efa27c-c104-4e86-8283-eac845b5fdd1" (UID: "45efa27c-c104-4e86-8283-eac845b5fdd1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.300460 4622 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45efa27c-c104-4e86-8283-eac845b5fdd1-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.300494 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.300519 4622 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.300530 4622 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45efa27c-c104-4e86-8283-eac845b5fdd1-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.300540 4622 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.300550 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sg94r\" (UniqueName: \"kubernetes.io/projected/45efa27c-c104-4e86-8283-eac845b5fdd1-kube-api-access-sg94r\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.306320 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "45efa27c-c104-4e86-8283-eac845b5fdd1" (UID: "45efa27c-c104-4e86-8283-eac845b5fdd1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.315376 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-config-data" (OuterVolumeSpecName: "config-data") pod "45efa27c-c104-4e86-8283-eac845b5fdd1" (UID: "45efa27c-c104-4e86-8283-eac845b5fdd1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.384452 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6dc44c56c-g4nc4"] Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.387541 4622 scope.go:117] "RemoveContainer" containerID="c2045a879e74d7473cea6ffa284e1be102459aa2bcb25c1d3f2e9a6d83953d17" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.393031 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6dc44c56c-g4nc4"] Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.404133 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.404166 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45efa27c-c104-4e86-8283-eac845b5fdd1-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.408480 4622 scope.go:117] "RemoveContainer" containerID="e916d09526558825d222c440c16e1937aeb6b9cabd4f0a7ed40d2723f6f34312" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.434222 4622 scope.go:117] "RemoveContainer" containerID="95103bd14ea594d50a21cad687fec99c558b66259b631f5f5b0adb727c217fad" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.451772 4622 scope.go:117] "RemoveContainer" containerID="47ba66dbbf222db95772293c6c9a1c2b28887cd56d80ffe89ab430ee56a4f1bf" Nov 26 11:52:59 crc kubenswrapper[4622]: E1126 11:52:59.452276 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47ba66dbbf222db95772293c6c9a1c2b28887cd56d80ffe89ab430ee56a4f1bf\": container with ID starting with 47ba66dbbf222db95772293c6c9a1c2b28887cd56d80ffe89ab430ee56a4f1bf not found: ID does not exist" containerID="47ba66dbbf222db95772293c6c9a1c2b28887cd56d80ffe89ab430ee56a4f1bf" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.452313 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47ba66dbbf222db95772293c6c9a1c2b28887cd56d80ffe89ab430ee56a4f1bf"} err="failed to get container status \"47ba66dbbf222db95772293c6c9a1c2b28887cd56d80ffe89ab430ee56a4f1bf\": rpc error: code = NotFound desc = could not find container \"47ba66dbbf222db95772293c6c9a1c2b28887cd56d80ffe89ab430ee56a4f1bf\": container with ID starting with 47ba66dbbf222db95772293c6c9a1c2b28887cd56d80ffe89ab430ee56a4f1bf not found: ID does not exist" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.452338 4622 scope.go:117] "RemoveContainer" containerID="c2045a879e74d7473cea6ffa284e1be102459aa2bcb25c1d3f2e9a6d83953d17" Nov 26 
11:52:59 crc kubenswrapper[4622]: E1126 11:52:59.452779 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2045a879e74d7473cea6ffa284e1be102459aa2bcb25c1d3f2e9a6d83953d17\": container with ID starting with c2045a879e74d7473cea6ffa284e1be102459aa2bcb25c1d3f2e9a6d83953d17 not found: ID does not exist" containerID="c2045a879e74d7473cea6ffa284e1be102459aa2bcb25c1d3f2e9a6d83953d17" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.452819 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2045a879e74d7473cea6ffa284e1be102459aa2bcb25c1d3f2e9a6d83953d17"} err="failed to get container status \"c2045a879e74d7473cea6ffa284e1be102459aa2bcb25c1d3f2e9a6d83953d17\": rpc error: code = NotFound desc = could not find container \"c2045a879e74d7473cea6ffa284e1be102459aa2bcb25c1d3f2e9a6d83953d17\": container with ID starting with c2045a879e74d7473cea6ffa284e1be102459aa2bcb25c1d3f2e9a6d83953d17 not found: ID does not exist" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.452844 4622 scope.go:117] "RemoveContainer" containerID="e916d09526558825d222c440c16e1937aeb6b9cabd4f0a7ed40d2723f6f34312" Nov 26 11:52:59 crc kubenswrapper[4622]: E1126 11:52:59.453123 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e916d09526558825d222c440c16e1937aeb6b9cabd4f0a7ed40d2723f6f34312\": container with ID starting with e916d09526558825d222c440c16e1937aeb6b9cabd4f0a7ed40d2723f6f34312 not found: ID does not exist" containerID="e916d09526558825d222c440c16e1937aeb6b9cabd4f0a7ed40d2723f6f34312" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.453154 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e916d09526558825d222c440c16e1937aeb6b9cabd4f0a7ed40d2723f6f34312"} err="failed to get container status \"e916d09526558825d222c440c16e1937aeb6b9cabd4f0a7ed40d2723f6f34312\": rpc error: code = NotFound desc = could not find container \"e916d09526558825d222c440c16e1937aeb6b9cabd4f0a7ed40d2723f6f34312\": container with ID starting with e916d09526558825d222c440c16e1937aeb6b9cabd4f0a7ed40d2723f6f34312 not found: ID does not exist" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.453168 4622 scope.go:117] "RemoveContainer" containerID="95103bd14ea594d50a21cad687fec99c558b66259b631f5f5b0adb727c217fad" Nov 26 11:52:59 crc kubenswrapper[4622]: E1126 11:52:59.453541 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95103bd14ea594d50a21cad687fec99c558b66259b631f5f5b0adb727c217fad\": container with ID starting with 95103bd14ea594d50a21cad687fec99c558b66259b631f5f5b0adb727c217fad not found: ID does not exist" containerID="95103bd14ea594d50a21cad687fec99c558b66259b631f5f5b0adb727c217fad" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.453568 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95103bd14ea594d50a21cad687fec99c558b66259b631f5f5b0adb727c217fad"} err="failed to get container status \"95103bd14ea594d50a21cad687fec99c558b66259b631f5f5b0adb727c217fad\": rpc error: code = NotFound desc = could not find container \"95103bd14ea594d50a21cad687fec99c558b66259b631f5f5b0adb727c217fad\": container with ID starting with 95103bd14ea594d50a21cad687fec99c558b66259b631f5f5b0adb727c217fad not found: ID does not exist" Nov 26 11:52:59 crc 
kubenswrapper[4622]: I1126 11:52:59.453584 4622 scope.go:117] "RemoveContainer" containerID="ff6baf2ba317375cc6d57f59e05f1f26d10ce46de3298927d6a501040825bdc9" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.470585 4622 scope.go:117] "RemoveContainer" containerID="9041ab97c5db6c23f0e5009bd84f4605ab1d81c8e9fec52851ec18f738956efe" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.490521 4622 scope.go:117] "RemoveContainer" containerID="ff6baf2ba317375cc6d57f59e05f1f26d10ce46de3298927d6a501040825bdc9" Nov 26 11:52:59 crc kubenswrapper[4622]: E1126 11:52:59.490851 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff6baf2ba317375cc6d57f59e05f1f26d10ce46de3298927d6a501040825bdc9\": container with ID starting with ff6baf2ba317375cc6d57f59e05f1f26d10ce46de3298927d6a501040825bdc9 not found: ID does not exist" containerID="ff6baf2ba317375cc6d57f59e05f1f26d10ce46de3298927d6a501040825bdc9" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.490883 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff6baf2ba317375cc6d57f59e05f1f26d10ce46de3298927d6a501040825bdc9"} err="failed to get container status \"ff6baf2ba317375cc6d57f59e05f1f26d10ce46de3298927d6a501040825bdc9\": rpc error: code = NotFound desc = could not find container \"ff6baf2ba317375cc6d57f59e05f1f26d10ce46de3298927d6a501040825bdc9\": container with ID starting with ff6baf2ba317375cc6d57f59e05f1f26d10ce46de3298927d6a501040825bdc9 not found: ID does not exist" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.490906 4622 scope.go:117] "RemoveContainer" containerID="9041ab97c5db6c23f0e5009bd84f4605ab1d81c8e9fec52851ec18f738956efe" Nov 26 11:52:59 crc kubenswrapper[4622]: E1126 11:52:59.491301 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9041ab97c5db6c23f0e5009bd84f4605ab1d81c8e9fec52851ec18f738956efe\": container with ID starting with 9041ab97c5db6c23f0e5009bd84f4605ab1d81c8e9fec52851ec18f738956efe not found: ID does not exist" containerID="9041ab97c5db6c23f0e5009bd84f4605ab1d81c8e9fec52851ec18f738956efe" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.491329 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9041ab97c5db6c23f0e5009bd84f4605ab1d81c8e9fec52851ec18f738956efe"} err="failed to get container status \"9041ab97c5db6c23f0e5009bd84f4605ab1d81c8e9fec52851ec18f738956efe\": rpc error: code = NotFound desc = could not find container \"9041ab97c5db6c23f0e5009bd84f4605ab1d81c8e9fec52851ec18f738956efe\": container with ID starting with 9041ab97c5db6c23f0e5009bd84f4605ab1d81c8e9fec52851ec18f738956efe not found: ID does not exist" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.579143 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.602179 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.619114 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:52:59 crc kubenswrapper[4622]: E1126 11:52:59.619840 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="proxy-httpd" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.619867 4622 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="proxy-httpd" Nov 26 11:52:59 crc kubenswrapper[4622]: E1126 11:52:59.619918 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="ceilometer-notification-agent" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.619925 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="ceilometer-notification-agent" Nov 26 11:52:59 crc kubenswrapper[4622]: E1126 11:52:59.619938 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76cda7d7-b0d1-4d08-93ae-70f43efd4438" containerName="dnsmasq-dns" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.619945 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="76cda7d7-b0d1-4d08-93ae-70f43efd4438" containerName="dnsmasq-dns" Nov 26 11:52:59 crc kubenswrapper[4622]: E1126 11:52:59.619965 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76cda7d7-b0d1-4d08-93ae-70f43efd4438" containerName="init" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.619973 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="76cda7d7-b0d1-4d08-93ae-70f43efd4438" containerName="init" Nov 26 11:52:59 crc kubenswrapper[4622]: E1126 11:52:59.619986 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="ceilometer-central-agent" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.619991 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="ceilometer-central-agent" Nov 26 11:52:59 crc kubenswrapper[4622]: E1126 11:52:59.620007 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="sg-core" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.620013 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="sg-core" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.620264 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="76cda7d7-b0d1-4d08-93ae-70f43efd4438" containerName="dnsmasq-dns" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.620287 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="ceilometer-central-agent" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.620299 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="ceilometer-notification-agent" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.620309 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="proxy-httpd" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.620319 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" containerName="sg-core" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.622922 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.626091 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.626814 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.629260 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.629287 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.709494 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-config-data\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.709585 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-scripts\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.709635 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.710279 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.710420 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7c076f4-d99d-4f67-a93c-263859151457-run-httpd\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.710609 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pnp7\" (UniqueName: \"kubernetes.io/projected/d7c076f4-d99d-4f67-a93c-263859151457-kube-api-access-9pnp7\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.710850 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.711179 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/d7c076f4-d99d-4f67-a93c-263859151457-log-httpd\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.816934 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-config-data\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.817053 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-scripts\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.817147 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.817195 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.817245 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7c076f4-d99d-4f67-a93c-263859151457-run-httpd\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.817329 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pnp7\" (UniqueName: \"kubernetes.io/projected/d7c076f4-d99d-4f67-a93c-263859151457-kube-api-access-9pnp7\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.817491 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.817725 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7c076f4-d99d-4f67-a93c-263859151457-log-httpd\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.832340 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7c076f4-d99d-4f67-a93c-263859151457-run-httpd\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.833101 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/d7c076f4-d99d-4f67-a93c-263859151457-log-httpd\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.838444 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.838703 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-config-data\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.842270 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.842633 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-scripts\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.852669 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pnp7\" (UniqueName: \"kubernetes.io/projected/d7c076f4-d99d-4f67-a93c-263859151457-kube-api-access-9pnp7\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.853164 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " pod="openstack/ceilometer-0" Nov 26 11:52:59 crc kubenswrapper[4622]: I1126 11:52:59.951767 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:53:00 crc kubenswrapper[4622]: I1126 11:53:00.387545 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:53:00 crc kubenswrapper[4622]: W1126 11:53:00.391832 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7c076f4_d99d_4f67_a93c_263859151457.slice/crio-ca467e67e5816e9d2a86f769a1db58008c81a93c2685afba944d3e21b559305a WatchSource:0}: Error finding container ca467e67e5816e9d2a86f769a1db58008c81a93c2685afba944d3e21b559305a: Status 404 returned error can't find the container with id ca467e67e5816e9d2a86f769a1db58008c81a93c2685afba944d3e21b559305a Nov 26 11:53:00 crc kubenswrapper[4622]: I1126 11:53:00.718843 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45efa27c-c104-4e86-8283-eac845b5fdd1" path="/var/lib/kubelet/pods/45efa27c-c104-4e86-8283-eac845b5fdd1/volumes" Nov 26 11:53:00 crc kubenswrapper[4622]: I1126 11:53:00.720295 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76cda7d7-b0d1-4d08-93ae-70f43efd4438" path="/var/lib/kubelet/pods/76cda7d7-b0d1-4d08-93ae-70f43efd4438/volumes" Nov 26 11:53:00 crc kubenswrapper[4622]: I1126 11:53:00.928783 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:53:01 crc kubenswrapper[4622]: I1126 11:53:01.266894 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7c076f4-d99d-4f67-a93c-263859151457","Type":"ContainerStarted","Data":"733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5"} Nov 26 11:53:01 crc kubenswrapper[4622]: I1126 11:53:01.267181 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7c076f4-d99d-4f67-a93c-263859151457","Type":"ContainerStarted","Data":"ca467e67e5816e9d2a86f769a1db58008c81a93c2685afba944d3e21b559305a"} Nov 26 11:53:03 crc kubenswrapper[4622]: I1126 11:53:03.284805 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7c076f4-d99d-4f67-a93c-263859151457","Type":"ContainerStarted","Data":"31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec"} Nov 26 11:53:04 crc kubenswrapper[4622]: I1126 11:53:04.297666 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7c076f4-d99d-4f67-a93c-263859151457","Type":"ContainerStarted","Data":"d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251"} Nov 26 11:53:06 crc kubenswrapper[4622]: I1126 11:53:06.318282 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7c076f4-d99d-4f67-a93c-263859151457","Type":"ContainerStarted","Data":"2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6"} Nov 26 11:53:06 crc kubenswrapper[4622]: I1126 11:53:06.318485 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="ceilometer-central-agent" containerID="cri-o://733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5" gracePeriod=30 Nov 26 11:53:06 crc kubenswrapper[4622]: I1126 11:53:06.318536 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="proxy-httpd" 
containerID="cri-o://2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6" gracePeriod=30 Nov 26 11:53:06 crc kubenswrapper[4622]: I1126 11:53:06.318601 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="ceilometer-notification-agent" containerID="cri-o://31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec" gracePeriod=30 Nov 26 11:53:06 crc kubenswrapper[4622]: I1126 11:53:06.318618 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="sg-core" containerID="cri-o://d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251" gracePeriod=30 Nov 26 11:53:06 crc kubenswrapper[4622]: I1126 11:53:06.320728 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 26 11:53:06 crc kubenswrapper[4622]: I1126 11:53:06.355413 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.261851765 podStartE2EDuration="7.355390854s" podCreationTimestamp="2025-11-26 11:52:59 +0000 UTC" firstStartedPulling="2025-11-26 11:53:00.395112635 +0000 UTC m=+2539.986324157" lastFinishedPulling="2025-11-26 11:53:05.488651723 +0000 UTC m=+2545.079863246" observedRunningTime="2025-11-26 11:53:06.343817643 +0000 UTC m=+2545.935029164" watchObservedRunningTime="2025-11-26 11:53:06.355390854 +0000 UTC m=+2545.946602376" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.047401 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.104070 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-combined-ca-bundle\") pod \"d7c076f4-d99d-4f67-a93c-263859151457\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.104192 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pnp7\" (UniqueName: \"kubernetes.io/projected/d7c076f4-d99d-4f67-a93c-263859151457-kube-api-access-9pnp7\") pod \"d7c076f4-d99d-4f67-a93c-263859151457\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.104301 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-sg-core-conf-yaml\") pod \"d7c076f4-d99d-4f67-a93c-263859151457\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.104487 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-config-data\") pod \"d7c076f4-d99d-4f67-a93c-263859151457\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.104556 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-scripts\") pod \"d7c076f4-d99d-4f67-a93c-263859151457\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " Nov 26 11:53:07 crc kubenswrapper[4622]: 
I1126 11:53:07.105020 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7c076f4-d99d-4f67-a93c-263859151457-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d7c076f4-d99d-4f67-a93c-263859151457" (UID: "d7c076f4-d99d-4f67-a93c-263859151457"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.105311 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7c076f4-d99d-4f67-a93c-263859151457-run-httpd\") pod \"d7c076f4-d99d-4f67-a93c-263859151457\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.105356 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-ceilometer-tls-certs\") pod \"d7c076f4-d99d-4f67-a93c-263859151457\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.105390 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7c076f4-d99d-4f67-a93c-263859151457-log-httpd\") pod \"d7c076f4-d99d-4f67-a93c-263859151457\" (UID: \"d7c076f4-d99d-4f67-a93c-263859151457\") " Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.106017 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7c076f4-d99d-4f67-a93c-263859151457-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d7c076f4-d99d-4f67-a93c-263859151457" (UID: "d7c076f4-d99d-4f67-a93c-263859151457"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.106648 4622 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7c076f4-d99d-4f67-a93c-263859151457-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.106667 4622 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7c076f4-d99d-4f67-a93c-263859151457-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.110177 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7c076f4-d99d-4f67-a93c-263859151457-kube-api-access-9pnp7" (OuterVolumeSpecName: "kube-api-access-9pnp7") pod "d7c076f4-d99d-4f67-a93c-263859151457" (UID: "d7c076f4-d99d-4f67-a93c-263859151457"). InnerVolumeSpecName "kube-api-access-9pnp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.112833 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-scripts" (OuterVolumeSpecName: "scripts") pod "d7c076f4-d99d-4f67-a93c-263859151457" (UID: "d7c076f4-d99d-4f67-a93c-263859151457"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.128565 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d7c076f4-d99d-4f67-a93c-263859151457" (UID: "d7c076f4-d99d-4f67-a93c-263859151457"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.147727 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "d7c076f4-d99d-4f67-a93c-263859151457" (UID: "d7c076f4-d99d-4f67-a93c-263859151457"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.159212 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7c076f4-d99d-4f67-a93c-263859151457" (UID: "d7c076f4-d99d-4f67-a93c-263859151457"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.179741 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-config-data" (OuterVolumeSpecName: "config-data") pod "d7c076f4-d99d-4f67-a93c-263859151457" (UID: "d7c076f4-d99d-4f67-a93c-263859151457"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.208066 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.208095 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.208106 4622 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.208121 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.208132 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pnp7\" (UniqueName: \"kubernetes.io/projected/d7c076f4-d99d-4f67-a93c-263859151457-kube-api-access-9pnp7\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.208142 4622 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d7c076f4-d99d-4f67-a93c-263859151457-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.331895 4622 generic.go:334] "Generic (PLEG): container 
finished" podID="d7c076f4-d99d-4f67-a93c-263859151457" containerID="2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6" exitCode=0 Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.331944 4622 generic.go:334] "Generic (PLEG): container finished" podID="d7c076f4-d99d-4f67-a93c-263859151457" containerID="d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251" exitCode=2 Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.331968 4622 generic.go:334] "Generic (PLEG): container finished" podID="d7c076f4-d99d-4f67-a93c-263859151457" containerID="31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec" exitCode=0 Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.331979 4622 generic.go:334] "Generic (PLEG): container finished" podID="d7c076f4-d99d-4f67-a93c-263859151457" containerID="733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5" exitCode=0 Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.331985 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.332023 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7c076f4-d99d-4f67-a93c-263859151457","Type":"ContainerDied","Data":"2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6"} Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.332096 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7c076f4-d99d-4f67-a93c-263859151457","Type":"ContainerDied","Data":"d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251"} Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.332112 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7c076f4-d99d-4f67-a93c-263859151457","Type":"ContainerDied","Data":"31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec"} Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.332124 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7c076f4-d99d-4f67-a93c-263859151457","Type":"ContainerDied","Data":"733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5"} Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.332135 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7c076f4-d99d-4f67-a93c-263859151457","Type":"ContainerDied","Data":"ca467e67e5816e9d2a86f769a1db58008c81a93c2685afba944d3e21b559305a"} Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.332159 4622 scope.go:117] "RemoveContainer" containerID="2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.358865 4622 scope.go:117] "RemoveContainer" containerID="d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.365945 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.383002 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.383491 4622 scope.go:117] "RemoveContainer" containerID="31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.410942 4622 scope.go:117] "RemoveContainer" 
containerID="733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.419133 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:53:07 crc kubenswrapper[4622]: E1126 11:53:07.419926 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="ceilometer-notification-agent" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.419949 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="ceilometer-notification-agent" Nov 26 11:53:07 crc kubenswrapper[4622]: E1126 11:53:07.420006 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="sg-core" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.420013 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="sg-core" Nov 26 11:53:07 crc kubenswrapper[4622]: E1126 11:53:07.420028 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="proxy-httpd" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.420037 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="proxy-httpd" Nov 26 11:53:07 crc kubenswrapper[4622]: E1126 11:53:07.420057 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="ceilometer-central-agent" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.420062 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="ceilometer-central-agent" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.420563 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="sg-core" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.420612 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="proxy-httpd" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.420627 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="ceilometer-central-agent" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.420645 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7c076f4-d99d-4f67-a93c-263859151457" containerName="ceilometer-notification-agent" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.424635 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.427271 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.427474 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.427735 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.434543 4622 scope.go:117] "RemoveContainer" containerID="2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6" Nov 26 11:53:07 crc kubenswrapper[4622]: E1126 11:53:07.435636 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6\": container with ID starting with 2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6 not found: ID does not exist" containerID="2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.435668 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6"} err="failed to get container status \"2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6\": rpc error: code = NotFound desc = could not find container \"2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6\": container with ID starting with 2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6 not found: ID does not exist" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.435693 4622 scope.go:117] "RemoveContainer" containerID="d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251" Nov 26 11:53:07 crc kubenswrapper[4622]: E1126 11:53:07.436731 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251\": container with ID starting with d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251 not found: ID does not exist" containerID="d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.436761 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251"} err="failed to get container status \"d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251\": rpc error: code = NotFound desc = could not find container \"d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251\": container with ID starting with d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251 not found: ID does not exist" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.436780 4622 scope.go:117] "RemoveContainer" containerID="31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec" Nov 26 11:53:07 crc kubenswrapper[4622]: E1126 11:53:07.437053 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec\": container with ID starting with 
31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec not found: ID does not exist" containerID="31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.437078 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec"} err="failed to get container status \"31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec\": rpc error: code = NotFound desc = could not find container \"31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec\": container with ID starting with 31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec not found: ID does not exist" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.437092 4622 scope.go:117] "RemoveContainer" containerID="733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5" Nov 26 11:53:07 crc kubenswrapper[4622]: E1126 11:53:07.438048 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5\": container with ID starting with 733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5 not found: ID does not exist" containerID="733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.438075 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5"} err="failed to get container status \"733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5\": rpc error: code = NotFound desc = could not find container \"733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5\": container with ID starting with 733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5 not found: ID does not exist" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.438127 4622 scope.go:117] "RemoveContainer" containerID="2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.438635 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6"} err="failed to get container status \"2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6\": rpc error: code = NotFound desc = could not find container \"2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6\": container with ID starting with 2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6 not found: ID does not exist" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.438656 4622 scope.go:117] "RemoveContainer" containerID="d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.441745 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251"} err="failed to get container status \"d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251\": rpc error: code = NotFound desc = could not find container \"d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251\": container with ID starting with d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251 not found: ID does not exist" Nov 26 
11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.441806 4622 scope.go:117] "RemoveContainer" containerID="31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.442551 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec"} err="failed to get container status \"31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec\": rpc error: code = NotFound desc = could not find container \"31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec\": container with ID starting with 31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec not found: ID does not exist" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.442571 4622 scope.go:117] "RemoveContainer" containerID="733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.446166 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.448727 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5"} err="failed to get container status \"733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5\": rpc error: code = NotFound desc = could not find container \"733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5\": container with ID starting with 733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5 not found: ID does not exist" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.448760 4622 scope.go:117] "RemoveContainer" containerID="2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.449276 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6"} err="failed to get container status \"2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6\": rpc error: code = NotFound desc = could not find container \"2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6\": container with ID starting with 2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6 not found: ID does not exist" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.449302 4622 scope.go:117] "RemoveContainer" containerID="d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.449865 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251"} err="failed to get container status \"d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251\": rpc error: code = NotFound desc = could not find container \"d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251\": container with ID starting with d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251 not found: ID does not exist" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.449897 4622 scope.go:117] "RemoveContainer" containerID="31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.450789 4622 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec"} err="failed to get container status \"31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec\": rpc error: code = NotFound desc = could not find container \"31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec\": container with ID starting with 31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec not found: ID does not exist" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.450832 4622 scope.go:117] "RemoveContainer" containerID="733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.451586 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5"} err="failed to get container status \"733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5\": rpc error: code = NotFound desc = could not find container \"733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5\": container with ID starting with 733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5 not found: ID does not exist" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.451612 4622 scope.go:117] "RemoveContainer" containerID="2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.451825 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6"} err="failed to get container status \"2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6\": rpc error: code = NotFound desc = could not find container \"2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6\": container with ID starting with 2ae69d25dccec3c6d9c71af882b4af7691604f41a644ed76b4964bfc634fbdb6 not found: ID does not exist" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.451845 4622 scope.go:117] "RemoveContainer" containerID="d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.452047 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251"} err="failed to get container status \"d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251\": rpc error: code = NotFound desc = could not find container \"d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251\": container with ID starting with d3cafcba33aa22d499f60d3fb1691f35281c88fa4a506e87dfdf73fca6f64251 not found: ID does not exist" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.452065 4622 scope.go:117] "RemoveContainer" containerID="31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.452254 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec"} err="failed to get container status \"31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec\": rpc error: code = NotFound desc = could not find container \"31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec\": container with ID starting with 31b6ef08a24d3e59aa5b1c3686222bee2d42521e31c27e1c2d42ff39670cdeec not found: ID does not exist" Nov 
26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.452271 4622 scope.go:117] "RemoveContainer" containerID="733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.452442 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5"} err="failed to get container status \"733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5\": rpc error: code = NotFound desc = could not find container \"733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5\": container with ID starting with 733248720cb76464dbee68317c7ce3c3d444160a8f2aac5f6abcdc4970606bf5 not found: ID does not exist" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.515634 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.515802 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-log-httpd\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.515860 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-run-httpd\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.515944 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.516056 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjbl7\" (UniqueName: \"kubernetes.io/projected/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-kube-api-access-vjbl7\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.516083 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-config-data\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.516222 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-scripts\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.516303 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.618158 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjbl7\" (UniqueName: \"kubernetes.io/projected/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-kube-api-access-vjbl7\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.618204 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-config-data\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.618248 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-scripts\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.618275 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.618304 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.618353 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-log-httpd\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.618392 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-run-httpd\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.618450 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.619599 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-run-httpd\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.619732 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-log-httpd\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.623693 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-scripts\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.623834 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.624242 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.624310 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-config-data\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.624525 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.656182 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjbl7\" (UniqueName: \"kubernetes.io/projected/dbf9ca7b-5cb5-4f47-9876-d18e42cade09-kube-api-access-vjbl7\") pod \"ceilometer-0\" (UID: \"dbf9ca7b-5cb5-4f47-9876-d18e42cade09\") " pod="openstack/ceilometer-0" Nov 26 11:53:07 crc kubenswrapper[4622]: I1126 11:53:07.750092 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 26 11:53:08 crc kubenswrapper[4622]: W1126 11:53:08.158286 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddbf9ca7b_5cb5_4f47_9876_d18e42cade09.slice/crio-1459b16ec218ddac6485ff7942431b9348283f4f359bc911975e902bb5213291 WatchSource:0}: Error finding container 1459b16ec218ddac6485ff7942431b9348283f4f359bc911975e902bb5213291: Status 404 returned error can't find the container with id 1459b16ec218ddac6485ff7942431b9348283f4f359bc911975e902bb5213291 Nov 26 11:53:08 crc kubenswrapper[4622]: I1126 11:53:08.161603 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 26 11:53:08 crc kubenswrapper[4622]: I1126 11:53:08.340061 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dbf9ca7b-5cb5-4f47-9876-d18e42cade09","Type":"ContainerStarted","Data":"1459b16ec218ddac6485ff7942431b9348283f4f359bc911975e902bb5213291"} Nov 26 11:53:08 crc kubenswrapper[4622]: I1126 11:53:08.717419 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7c076f4-d99d-4f67-a93c-263859151457" path="/var/lib/kubelet/pods/d7c076f4-d99d-4f67-a93c-263859151457/volumes" Nov 26 11:53:09 crc kubenswrapper[4622]: I1126 11:53:09.222439 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Nov 26 11:53:09 crc kubenswrapper[4622]: I1126 11:53:09.275837 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Nov 26 11:53:09 crc kubenswrapper[4622]: I1126 11:53:09.304291 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Nov 26 11:53:09 crc kubenswrapper[4622]: I1126 11:53:09.326488 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Nov 26 11:53:09 crc kubenswrapper[4622]: I1126 11:53:09.352726 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dbf9ca7b-5cb5-4f47-9876-d18e42cade09","Type":"ContainerStarted","Data":"650fdb49eca788699f35bea43e52143319e6e1ba8e7bccce1a78912d2156ca11"} Nov 26 11:53:09 crc kubenswrapper[4622]: I1126 11:53:09.352863 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="caa72da5-8c2e-4830-b830-c4d894f9bb5d" containerName="manila-share" containerID="cri-o://2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7" gracePeriod=30 Nov 26 11:53:09 crc kubenswrapper[4622]: I1126 11:53:09.353395 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="1cb716dd-ba25-414a-b7f1-57cff3df83ea" containerName="manila-scheduler" containerID="cri-o://ea1d5b798538d8b0788948f766e3fed2d6d4b27a1a5f0dbe27e37fe59945b114" gracePeriod=30 Nov 26 11:53:09 crc kubenswrapper[4622]: I1126 11:53:09.353609 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="1cb716dd-ba25-414a-b7f1-57cff3df83ea" containerName="probe" containerID="cri-o://43fd4e3892921d571788b05875bd85f9d9689bc38a7a73f28d69d8cda7d580ec" gracePeriod=30 Nov 26 11:53:09 crc kubenswrapper[4622]: I1126 11:53:09.353771 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="caa72da5-8c2e-4830-b830-c4d894f9bb5d" containerName="probe" 
containerID="cri-o://5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4" gracePeriod=30 Nov 26 11:53:10 crc kubenswrapper[4622]: E1126 11:53:10.057804 4622 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1cb716dd_ba25_414a_b7f1_57cff3df83ea.slice/crio-43fd4e3892921d571788b05875bd85f9d9689bc38a7a73f28d69d8cda7d580ec.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1cb716dd_ba25_414a_b7f1_57cff3df83ea.slice/crio-conmon-43fd4e3892921d571788b05875bd85f9d9689bc38a7a73f28d69d8cda7d580ec.scope\": RecentStats: unable to find data in memory cache]" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.289984 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.390590 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkc44\" (UniqueName: \"kubernetes.io/projected/caa72da5-8c2e-4830-b830-c4d894f9bb5d-kube-api-access-wkc44\") pod \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.390679 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/caa72da5-8c2e-4830-b830-c4d894f9bb5d-ceph\") pod \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.390807 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-config-data-custom\") pod \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.390872 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-scripts\") pod \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.390915 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/caa72da5-8c2e-4830-b830-c4d894f9bb5d-etc-machine-id\") pod \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.390964 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/caa72da5-8c2e-4830-b830-c4d894f9bb5d-var-lib-manila\") pod \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.391007 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-combined-ca-bundle\") pod \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.391041 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-config-data\") pod \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\" (UID: \"caa72da5-8c2e-4830-b830-c4d894f9bb5d\") " Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.394638 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/caa72da5-8c2e-4830-b830-c4d894f9bb5d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "caa72da5-8c2e-4830-b830-c4d894f9bb5d" (UID: "caa72da5-8c2e-4830-b830-c4d894f9bb5d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.394757 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/caa72da5-8c2e-4830-b830-c4d894f9bb5d-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "caa72da5-8c2e-4830-b830-c4d894f9bb5d" (UID: "caa72da5-8c2e-4830-b830-c4d894f9bb5d"). InnerVolumeSpecName "var-lib-manila". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.405449 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "caa72da5-8c2e-4830-b830-c4d894f9bb5d" (UID: "caa72da5-8c2e-4830-b830-c4d894f9bb5d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.405934 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caa72da5-8c2e-4830-b830-c4d894f9bb5d-ceph" (OuterVolumeSpecName: "ceph") pod "caa72da5-8c2e-4830-b830-c4d894f9bb5d" (UID: "caa72da5-8c2e-4830-b830-c4d894f9bb5d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.409978 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-scripts" (OuterVolumeSpecName: "scripts") pod "caa72da5-8c2e-4830-b830-c4d894f9bb5d" (UID: "caa72da5-8c2e-4830-b830-c4d894f9bb5d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.410085 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caa72da5-8c2e-4830-b830-c4d894f9bb5d-kube-api-access-wkc44" (OuterVolumeSpecName: "kube-api-access-wkc44") pod "caa72da5-8c2e-4830-b830-c4d894f9bb5d" (UID: "caa72da5-8c2e-4830-b830-c4d894f9bb5d"). InnerVolumeSpecName "kube-api-access-wkc44". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.410256 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dbf9ca7b-5cb5-4f47-9876-d18e42cade09","Type":"ContainerStarted","Data":"2e7c4aa8376a1ba558be44f0a91a86bb639840542b36ffa0ee72b17c5d629b9d"} Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.413297 4622 generic.go:334] "Generic (PLEG): container finished" podID="caa72da5-8c2e-4830-b830-c4d894f9bb5d" containerID="5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4" exitCode=0 Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.413329 4622 generic.go:334] "Generic (PLEG): container finished" podID="caa72da5-8c2e-4830-b830-c4d894f9bb5d" containerID="2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7" exitCode=1 Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.413376 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"caa72da5-8c2e-4830-b830-c4d894f9bb5d","Type":"ContainerDied","Data":"5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4"} Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.413409 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"caa72da5-8c2e-4830-b830-c4d894f9bb5d","Type":"ContainerDied","Data":"2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7"} Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.413423 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"caa72da5-8c2e-4830-b830-c4d894f9bb5d","Type":"ContainerDied","Data":"69ae7f201c7f0cc3c49af4c94bca1a466022cfee30032b88586e7257caedc305"} Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.413440 4622 scope.go:117] "RemoveContainer" containerID="5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.413600 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.421167 4622 generic.go:334] "Generic (PLEG): container finished" podID="1cb716dd-ba25-414a-b7f1-57cff3df83ea" containerID="43fd4e3892921d571788b05875bd85f9d9689bc38a7a73f28d69d8cda7d580ec" exitCode=0 Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.421211 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"1cb716dd-ba25-414a-b7f1-57cff3df83ea","Type":"ContainerDied","Data":"43fd4e3892921d571788b05875bd85f9d9689bc38a7a73f28d69d8cda7d580ec"} Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.470758 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "caa72da5-8c2e-4830-b830-c4d894f9bb5d" (UID: "caa72da5-8c2e-4830-b830-c4d894f9bb5d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.489700 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-config-data" (OuterVolumeSpecName: "config-data") pod "caa72da5-8c2e-4830-b830-c4d894f9bb5d" (UID: "caa72da5-8c2e-4830-b830-c4d894f9bb5d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.494460 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.494494 4622 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/caa72da5-8c2e-4830-b830-c4d894f9bb5d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.494520 4622 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/caa72da5-8c2e-4830-b830-c4d894f9bb5d-var-lib-manila\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.494531 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.494540 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.494551 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkc44\" (UniqueName: \"kubernetes.io/projected/caa72da5-8c2e-4830-b830-c4d894f9bb5d-kube-api-access-wkc44\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.494560 4622 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/caa72da5-8c2e-4830-b830-c4d894f9bb5d-ceph\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.494567 4622 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/caa72da5-8c2e-4830-b830-c4d894f9bb5d-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.502658 4622 scope.go:117] "RemoveContainer" containerID="2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.519489 4622 scope.go:117] "RemoveContainer" containerID="5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4" Nov 26 11:53:10 crc kubenswrapper[4622]: E1126 11:53:10.519844 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4\": container with ID starting with 5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4 not found: ID does not exist" containerID="5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.519926 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4"} err="failed to get container status \"5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4\": rpc error: code = NotFound desc = could not find container \"5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4\": container with ID starting with 
5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4 not found: ID does not exist" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.520016 4622 scope.go:117] "RemoveContainer" containerID="2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7" Nov 26 11:53:10 crc kubenswrapper[4622]: E1126 11:53:10.520421 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7\": container with ID starting with 2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7 not found: ID does not exist" containerID="2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.520495 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7"} err="failed to get container status \"2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7\": rpc error: code = NotFound desc = could not find container \"2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7\": container with ID starting with 2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7 not found: ID does not exist" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.520582 4622 scope.go:117] "RemoveContainer" containerID="5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.522785 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4"} err="failed to get container status \"5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4\": rpc error: code = NotFound desc = could not find container \"5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4\": container with ID starting with 5cee911831c97e6471e9e02e92980a042c6aa08885fcfee29d76fedbea5a57f4 not found: ID does not exist" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.522923 4622 scope.go:117] "RemoveContainer" containerID="2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.523183 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7"} err="failed to get container status \"2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7\": rpc error: code = NotFound desc = could not find container \"2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7\": container with ID starting with 2f3385296b95c17649eaac7e3970002291361984bd4a8f0fbaa6552a1ae9d4c7 not found: ID does not exist" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.773480 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.785447 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-share-share1-0"] Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.795712 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Nov 26 11:53:10 crc kubenswrapper[4622]: E1126 11:53:10.796231 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caa72da5-8c2e-4830-b830-c4d894f9bb5d" containerName="probe" Nov 26 11:53:10 
crc kubenswrapper[4622]: I1126 11:53:10.796251 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="caa72da5-8c2e-4830-b830-c4d894f9bb5d" containerName="probe" Nov 26 11:53:10 crc kubenswrapper[4622]: E1126 11:53:10.796262 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caa72da5-8c2e-4830-b830-c4d894f9bb5d" containerName="manila-share" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.796270 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="caa72da5-8c2e-4830-b830-c4d894f9bb5d" containerName="manila-share" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.796490 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="caa72da5-8c2e-4830-b830-c4d894f9bb5d" containerName="probe" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.796523 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="caa72da5-8c2e-4830-b830-c4d894f9bb5d" containerName="manila-share" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.797577 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.802483 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.803596 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.906007 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31e89058-d523-4206-9017-9d8c1c299094-config-data\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.906073 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g992t\" (UniqueName: \"kubernetes.io/projected/31e89058-d523-4206-9017-9d8c1c299094-kube-api-access-g992t\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.906130 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/31e89058-d523-4206-9017-9d8c1c299094-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.906165 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31e89058-d523-4206-9017-9d8c1c299094-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.906206 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/31e89058-d523-4206-9017-9d8c1c299094-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.906319 4622 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/31e89058-d523-4206-9017-9d8c1c299094-ceph\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.906372 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31e89058-d523-4206-9017-9d8c1c299094-scripts\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:10 crc kubenswrapper[4622]: I1126 11:53:10.906412 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31e89058-d523-4206-9017-9d8c1c299094-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.008972 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31e89058-d523-4206-9017-9d8c1c299094-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.009065 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31e89058-d523-4206-9017-9d8c1c299094-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.009096 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31e89058-d523-4206-9017-9d8c1c299094-config-data\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.009167 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g992t\" (UniqueName: \"kubernetes.io/projected/31e89058-d523-4206-9017-9d8c1c299094-kube-api-access-g992t\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.009191 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/31e89058-d523-4206-9017-9d8c1c299094-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.009249 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31e89058-d523-4206-9017-9d8c1c299094-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.009324 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/31e89058-d523-4206-9017-9d8c1c299094-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.009393 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/31e89058-d523-4206-9017-9d8c1c299094-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.009482 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/31e89058-d523-4206-9017-9d8c1c299094-ceph\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.009566 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31e89058-d523-4206-9017-9d8c1c299094-scripts\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.015002 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/31e89058-d523-4206-9017-9d8c1c299094-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.015442 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/31e89058-d523-4206-9017-9d8c1c299094-ceph\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.017563 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31e89058-d523-4206-9017-9d8c1c299094-scripts\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.018132 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31e89058-d523-4206-9017-9d8c1c299094-config-data\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.018273 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31e89058-d523-4206-9017-9d8c1c299094-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.029028 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g992t\" (UniqueName: \"kubernetes.io/projected/31e89058-d523-4206-9017-9d8c1c299094-kube-api-access-g992t\") pod \"manila-share-share1-0\" (UID: \"31e89058-d523-4206-9017-9d8c1c299094\") " pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.117336 4622 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.436007 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dbf9ca7b-5cb5-4f47-9876-d18e42cade09","Type":"ContainerStarted","Data":"124cf53d9e222bcc4bbcf65be8f20efb8f5d59d75c06c744173321cbc68c0d6a"} Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.630997 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Nov 26 11:53:11 crc kubenswrapper[4622]: I1126 11:53:11.904348 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.037837 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1cb716dd-ba25-414a-b7f1-57cff3df83ea-etc-machine-id\") pod \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.037951 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cb716dd-ba25-414a-b7f1-57cff3df83ea-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1cb716dd-ba25-414a-b7f1-57cff3df83ea" (UID: "1cb716dd-ba25-414a-b7f1-57cff3df83ea"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.038521 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-config-data-custom\") pod \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.038684 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-scripts\") pod \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.038856 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpx58\" (UniqueName: \"kubernetes.io/projected/1cb716dd-ba25-414a-b7f1-57cff3df83ea-kube-api-access-wpx58\") pod \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.038922 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-config-data\") pod \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.039069 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-combined-ca-bundle\") pod \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\" (UID: \"1cb716dd-ba25-414a-b7f1-57cff3df83ea\") " Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.044323 4622 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1cb716dd-ba25-414a-b7f1-57cff3df83ea-etc-machine-id\") on node \"crc\" DevicePath \"\"" 
Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.046734 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1cb716dd-ba25-414a-b7f1-57cff3df83ea" (UID: "1cb716dd-ba25-414a-b7f1-57cff3df83ea"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.047518 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-scripts" (OuterVolumeSpecName: "scripts") pod "1cb716dd-ba25-414a-b7f1-57cff3df83ea" (UID: "1cb716dd-ba25-414a-b7f1-57cff3df83ea"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.054416 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cb716dd-ba25-414a-b7f1-57cff3df83ea-kube-api-access-wpx58" (OuterVolumeSpecName: "kube-api-access-wpx58") pod "1cb716dd-ba25-414a-b7f1-57cff3df83ea" (UID: "1cb716dd-ba25-414a-b7f1-57cff3df83ea"). InnerVolumeSpecName "kube-api-access-wpx58". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.149063 4622 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.149203 4622 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-scripts\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.149294 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpx58\" (UniqueName: \"kubernetes.io/projected/1cb716dd-ba25-414a-b7f1-57cff3df83ea-kube-api-access-wpx58\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.205366 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1cb716dd-ba25-414a-b7f1-57cff3df83ea" (UID: "1cb716dd-ba25-414a-b7f1-57cff3df83ea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.251743 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.324320 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-config-data" (OuterVolumeSpecName: "config-data") pod "1cb716dd-ba25-414a-b7f1-57cff3df83ea" (UID: "1cb716dd-ba25-414a-b7f1-57cff3df83ea"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.353419 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cb716dd-ba25-414a-b7f1-57cff3df83ea-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.482071 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"31e89058-d523-4206-9017-9d8c1c299094","Type":"ContainerStarted","Data":"38edd9369630faa38e40adc2760a028fd025516ef1792cc10f45f4cf4bce1849"} Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.482121 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"31e89058-d523-4206-9017-9d8c1c299094","Type":"ContainerStarted","Data":"adbd0ff40381851172302ad24d442264700f6f71871c7c5050e0aa2d19198f8b"} Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.486762 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dbf9ca7b-5cb5-4f47-9876-d18e42cade09","Type":"ContainerStarted","Data":"3702bbc5967cf49fee8afb6746f4d582619bbb612720d1dcd3f2a5d18eda74d5"} Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.486903 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.490315 4622 generic.go:334] "Generic (PLEG): container finished" podID="1cb716dd-ba25-414a-b7f1-57cff3df83ea" containerID="ea1d5b798538d8b0788948f766e3fed2d6d4b27a1a5f0dbe27e37fe59945b114" exitCode=0 Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.490347 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"1cb716dd-ba25-414a-b7f1-57cff3df83ea","Type":"ContainerDied","Data":"ea1d5b798538d8b0788948f766e3fed2d6d4b27a1a5f0dbe27e37fe59945b114"} Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.490366 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"1cb716dd-ba25-414a-b7f1-57cff3df83ea","Type":"ContainerDied","Data":"444ac08475b3356737da296c2c8a2a8d0851ba182bf23d7cf9abd669b8cd50e2"} Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.490381 4622 scope.go:117] "RemoveContainer" containerID="43fd4e3892921d571788b05875bd85f9d9689bc38a7a73f28d69d8cda7d580ec" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.490846 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.511445 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.57843761 podStartE2EDuration="5.511435618s" podCreationTimestamp="2025-11-26 11:53:07 +0000 UTC" firstStartedPulling="2025-11-26 11:53:08.16084781 +0000 UTC m=+2547.752059321" lastFinishedPulling="2025-11-26 11:53:12.093845806 +0000 UTC m=+2551.685057329" observedRunningTime="2025-11-26 11:53:12.505117836 +0000 UTC m=+2552.096329358" watchObservedRunningTime="2025-11-26 11:53:12.511435618 +0000 UTC m=+2552.102647140" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.520006 4622 scope.go:117] "RemoveContainer" containerID="ea1d5b798538d8b0788948f766e3fed2d6d4b27a1a5f0dbe27e37fe59945b114" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.527406 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.539802 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-scheduler-0"] Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.549844 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Nov 26 11:53:12 crc kubenswrapper[4622]: E1126 11:53:12.550315 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cb716dd-ba25-414a-b7f1-57cff3df83ea" containerName="manila-scheduler" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.550333 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cb716dd-ba25-414a-b7f1-57cff3df83ea" containerName="manila-scheduler" Nov 26 11:53:12 crc kubenswrapper[4622]: E1126 11:53:12.550349 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cb716dd-ba25-414a-b7f1-57cff3df83ea" containerName="probe" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.550356 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cb716dd-ba25-414a-b7f1-57cff3df83ea" containerName="probe" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.550573 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cb716dd-ba25-414a-b7f1-57cff3df83ea" containerName="manila-scheduler" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.550591 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cb716dd-ba25-414a-b7f1-57cff3df83ea" containerName="probe" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.551635 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.555723 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.555766 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.568976 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dba7b70b-838a-4828-a4ae-7464524b8217-config-data\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.569026 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dba7b70b-838a-4828-a4ae-7464524b8217-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.569064 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dba7b70b-838a-4828-a4ae-7464524b8217-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.569124 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dba7b70b-838a-4828-a4ae-7464524b8217-scripts\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.569181 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gj72c\" (UniqueName: \"kubernetes.io/projected/dba7b70b-838a-4828-a4ae-7464524b8217-kube-api-access-gj72c\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.569250 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dba7b70b-838a-4828-a4ae-7464524b8217-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.571420 4622 scope.go:117] "RemoveContainer" containerID="43fd4e3892921d571788b05875bd85f9d9689bc38a7a73f28d69d8cda7d580ec" Nov 26 11:53:12 crc kubenswrapper[4622]: E1126 11:53:12.580125 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43fd4e3892921d571788b05875bd85f9d9689bc38a7a73f28d69d8cda7d580ec\": container with ID starting with 43fd4e3892921d571788b05875bd85f9d9689bc38a7a73f28d69d8cda7d580ec not found: ID does not exist" containerID="43fd4e3892921d571788b05875bd85f9d9689bc38a7a73f28d69d8cda7d580ec" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.580170 4622 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"43fd4e3892921d571788b05875bd85f9d9689bc38a7a73f28d69d8cda7d580ec"} err="failed to get container status \"43fd4e3892921d571788b05875bd85f9d9689bc38a7a73f28d69d8cda7d580ec\": rpc error: code = NotFound desc = could not find container \"43fd4e3892921d571788b05875bd85f9d9689bc38a7a73f28d69d8cda7d580ec\": container with ID starting with 43fd4e3892921d571788b05875bd85f9d9689bc38a7a73f28d69d8cda7d580ec not found: ID does not exist" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.580200 4622 scope.go:117] "RemoveContainer" containerID="ea1d5b798538d8b0788948f766e3fed2d6d4b27a1a5f0dbe27e37fe59945b114" Nov 26 11:53:12 crc kubenswrapper[4622]: E1126 11:53:12.580933 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea1d5b798538d8b0788948f766e3fed2d6d4b27a1a5f0dbe27e37fe59945b114\": container with ID starting with ea1d5b798538d8b0788948f766e3fed2d6d4b27a1a5f0dbe27e37fe59945b114 not found: ID does not exist" containerID="ea1d5b798538d8b0788948f766e3fed2d6d4b27a1a5f0dbe27e37fe59945b114" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.580989 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea1d5b798538d8b0788948f766e3fed2d6d4b27a1a5f0dbe27e37fe59945b114"} err="failed to get container status \"ea1d5b798538d8b0788948f766e3fed2d6d4b27a1a5f0dbe27e37fe59945b114\": rpc error: code = NotFound desc = could not find container \"ea1d5b798538d8b0788948f766e3fed2d6d4b27a1a5f0dbe27e37fe59945b114\": container with ID starting with ea1d5b798538d8b0788948f766e3fed2d6d4b27a1a5f0dbe27e37fe59945b114 not found: ID does not exist" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.671665 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dba7b70b-838a-4828-a4ae-7464524b8217-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.672007 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dba7b70b-838a-4828-a4ae-7464524b8217-scripts\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.672181 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gj72c\" (UniqueName: \"kubernetes.io/projected/dba7b70b-838a-4828-a4ae-7464524b8217-kube-api-access-gj72c\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.672288 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dba7b70b-838a-4828-a4ae-7464524b8217-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.672391 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dba7b70b-838a-4828-a4ae-7464524b8217-config-data\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 
crc kubenswrapper[4622]: I1126 11:53:12.672457 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dba7b70b-838a-4828-a4ae-7464524b8217-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.675157 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dba7b70b-838a-4828-a4ae-7464524b8217-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.679179 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dba7b70b-838a-4828-a4ae-7464524b8217-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.680063 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dba7b70b-838a-4828-a4ae-7464524b8217-config-data\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.681380 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dba7b70b-838a-4828-a4ae-7464524b8217-scripts\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.681791 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dba7b70b-838a-4828-a4ae-7464524b8217-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.691691 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gj72c\" (UniqueName: \"kubernetes.io/projected/dba7b70b-838a-4828-a4ae-7464524b8217-kube-api-access-gj72c\") pod \"manila-scheduler-0\" (UID: \"dba7b70b-838a-4828-a4ae-7464524b8217\") " pod="openstack/manila-scheduler-0" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.721695 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cb716dd-ba25-414a-b7f1-57cff3df83ea" path="/var/lib/kubelet/pods/1cb716dd-ba25-414a-b7f1-57cff3df83ea/volumes" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.722631 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="caa72da5-8c2e-4830-b830-c4d894f9bb5d" path="/var/lib/kubelet/pods/caa72da5-8c2e-4830-b830-c4d894f9bb5d/volumes" Nov 26 11:53:12 crc kubenswrapper[4622]: I1126 11:53:12.880152 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Nov 26 11:53:13 crc kubenswrapper[4622]: I1126 11:53:13.333157 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Nov 26 11:53:13 crc kubenswrapper[4622]: I1126 11:53:13.507989 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"31e89058-d523-4206-9017-9d8c1c299094","Type":"ContainerStarted","Data":"a8578c4cd0d9f52f3d10832928cc1d3275e3b948ac07d76dc175be02b5f24db2"} Nov 26 11:53:13 crc kubenswrapper[4622]: I1126 11:53:13.510901 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"dba7b70b-838a-4828-a4ae-7464524b8217","Type":"ContainerStarted","Data":"04b904e60b25f627f5a1ccdcac8bab32e9ca9e8f86a1179a486cc0cde6736816"} Nov 26 11:53:13 crc kubenswrapper[4622]: I1126 11:53:13.532781 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.532760123 podStartE2EDuration="3.532760123s" podCreationTimestamp="2025-11-26 11:53:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:53:13.526575552 +0000 UTC m=+2553.117787073" watchObservedRunningTime="2025-11-26 11:53:13.532760123 +0000 UTC m=+2553.123971645" Nov 26 11:53:14 crc kubenswrapper[4622]: I1126 11:53:14.521340 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"dba7b70b-838a-4828-a4ae-7464524b8217","Type":"ContainerStarted","Data":"cdbc4ac0da635661cde42738281684e13d7d4dd0e1c991e4f65e74b82ad253b1"} Nov 26 11:53:14 crc kubenswrapper[4622]: I1126 11:53:14.521655 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"dba7b70b-838a-4828-a4ae-7464524b8217","Type":"ContainerStarted","Data":"4b3ed46f27c553a0e40d35ca9e103308d94f45eeb21657eeccf690336cc3e858"} Nov 26 11:53:14 crc kubenswrapper[4622]: I1126 11:53:14.539743 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=2.539733609 podStartE2EDuration="2.539733609s" podCreationTimestamp="2025-11-26 11:53:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:53:14.537358351 +0000 UTC m=+2554.128569872" watchObservedRunningTime="2025-11-26 11:53:14.539733609 +0000 UTC m=+2554.130945131" Nov 26 11:53:14 crc kubenswrapper[4622]: I1126 11:53:14.685854 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Nov 26 11:53:21 crc kubenswrapper[4622]: I1126 11:53:21.118069 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Nov 26 11:53:22 crc kubenswrapper[4622]: I1126 11:53:22.881061 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Nov 26 11:53:32 crc kubenswrapper[4622]: I1126 11:53:32.504251 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Nov 26 11:53:34 crc kubenswrapper[4622]: I1126 11:53:34.246151 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Nov 26 11:53:37 crc kubenswrapper[4622]: I1126 11:53:37.760254 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/ceilometer-0" Nov 26 11:54:15 crc kubenswrapper[4622]: I1126 11:54:15.198467 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:54:15 crc kubenswrapper[4622]: I1126 11:54:15.200041 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:54:24 crc kubenswrapper[4622]: I1126 11:54:24.637396 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7b8f89988-7flbr"] Nov 26 11:54:24 crc kubenswrapper[4622]: I1126 11:54:24.639147 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7b8f89988-7flbr" Nov 26 11:54:24 crc kubenswrapper[4622]: I1126 11:54:24.672758 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgwhl\" (UniqueName: \"kubernetes.io/projected/93a6cdc7-be83-4eba-af13-f1bc89429adf-kube-api-access-dgwhl\") pod \"openstack-operator-controller-operator-7b8f89988-7flbr\" (UID: \"93a6cdc7-be83-4eba-af13-f1bc89429adf\") " pod="openstack-operators/openstack-operator-controller-operator-7b8f89988-7flbr" Nov 26 11:54:24 crc kubenswrapper[4622]: I1126 11:54:24.683408 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7b8f89988-7flbr"] Nov 26 11:54:24 crc kubenswrapper[4622]: I1126 11:54:24.775208 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgwhl\" (UniqueName: \"kubernetes.io/projected/93a6cdc7-be83-4eba-af13-f1bc89429adf-kube-api-access-dgwhl\") pod \"openstack-operator-controller-operator-7b8f89988-7flbr\" (UID: \"93a6cdc7-be83-4eba-af13-f1bc89429adf\") " pod="openstack-operators/openstack-operator-controller-operator-7b8f89988-7flbr" Nov 26 11:54:24 crc kubenswrapper[4622]: I1126 11:54:24.798221 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgwhl\" (UniqueName: \"kubernetes.io/projected/93a6cdc7-be83-4eba-af13-f1bc89429adf-kube-api-access-dgwhl\") pod \"openstack-operator-controller-operator-7b8f89988-7flbr\" (UID: \"93a6cdc7-be83-4eba-af13-f1bc89429adf\") " pod="openstack-operators/openstack-operator-controller-operator-7b8f89988-7flbr" Nov 26 11:54:24 crc kubenswrapper[4622]: I1126 11:54:24.962170 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7b8f89988-7flbr" Nov 26 11:54:25 crc kubenswrapper[4622]: I1126 11:54:25.374414 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7b8f89988-7flbr"] Nov 26 11:54:26 crc kubenswrapper[4622]: I1126 11:54:26.241118 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7b8f89988-7flbr" event={"ID":"93a6cdc7-be83-4eba-af13-f1bc89429adf","Type":"ContainerStarted","Data":"f8dd09bb857ec64697b037085efd93812be96fd589de3aaf9ebea6f76e7706a5"} Nov 26 11:54:26 crc kubenswrapper[4622]: I1126 11:54:26.241189 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7b8f89988-7flbr" event={"ID":"93a6cdc7-be83-4eba-af13-f1bc89429adf","Type":"ContainerStarted","Data":"6caae0d416d16e46c6e87e59b3760f492b0644948c5fdd8443ce42c89f03e6f3"} Nov 26 11:54:26 crc kubenswrapper[4622]: I1126 11:54:26.241292 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-7b8f89988-7flbr" Nov 26 11:54:26 crc kubenswrapper[4622]: I1126 11:54:26.274528 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-7b8f89988-7flbr" podStartSLOduration=2.274490557 podStartE2EDuration="2.274490557s" podCreationTimestamp="2025-11-26 11:54:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 11:54:26.265791925 +0000 UTC m=+2625.857003447" watchObservedRunningTime="2025-11-26 11:54:26.274490557 +0000 UTC m=+2625.865702079" Nov 26 11:54:34 crc kubenswrapper[4622]: I1126 11:54:34.965740 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-7b8f89988-7flbr" Nov 26 11:54:35 crc kubenswrapper[4622]: I1126 11:54:35.031210 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8"] Nov 26 11:54:35 crc kubenswrapper[4622]: I1126 11:54:35.031457 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8" podUID="d29c5844-9fc3-488b-a40e-7846eea50619" containerName="operator" containerID="cri-o://0107c566aa7d6be33c14f41f7faa1e20340ea506a28be29fc7782b135ad99c86" gracePeriod=10 Nov 26 11:54:35 crc kubenswrapper[4622]: I1126 11:54:35.329669 4622 generic.go:334] "Generic (PLEG): container finished" podID="d29c5844-9fc3-488b-a40e-7846eea50619" containerID="0107c566aa7d6be33c14f41f7faa1e20340ea506a28be29fc7782b135ad99c86" exitCode=0 Nov 26 11:54:35 crc kubenswrapper[4622]: I1126 11:54:35.330021 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8" event={"ID":"d29c5844-9fc3-488b-a40e-7846eea50619","Type":"ContainerDied","Data":"0107c566aa7d6be33c14f41f7faa1e20340ea506a28be29fc7782b135ad99c86"} Nov 26 11:54:35 crc kubenswrapper[4622]: I1126 11:54:35.462032 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8" Nov 26 11:54:35 crc kubenswrapper[4622]: I1126 11:54:35.628639 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfpmw\" (UniqueName: \"kubernetes.io/projected/d29c5844-9fc3-488b-a40e-7846eea50619-kube-api-access-qfpmw\") pod \"d29c5844-9fc3-488b-a40e-7846eea50619\" (UID: \"d29c5844-9fc3-488b-a40e-7846eea50619\") " Nov 26 11:54:35 crc kubenswrapper[4622]: I1126 11:54:35.635464 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d29c5844-9fc3-488b-a40e-7846eea50619-kube-api-access-qfpmw" (OuterVolumeSpecName: "kube-api-access-qfpmw") pod "d29c5844-9fc3-488b-a40e-7846eea50619" (UID: "d29c5844-9fc3-488b-a40e-7846eea50619"). InnerVolumeSpecName "kube-api-access-qfpmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:54:35 crc kubenswrapper[4622]: I1126 11:54:35.732429 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfpmw\" (UniqueName: \"kubernetes.io/projected/d29c5844-9fc3-488b-a40e-7846eea50619-kube-api-access-qfpmw\") on node \"crc\" DevicePath \"\"" Nov 26 11:54:36 crc kubenswrapper[4622]: I1126 11:54:36.343912 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8" event={"ID":"d29c5844-9fc3-488b-a40e-7846eea50619","Type":"ContainerDied","Data":"aee672e653974fb331b1b5ca6491bb7bce6f4780c694fcdddf51e013686f00ce"} Nov 26 11:54:36 crc kubenswrapper[4622]: I1126 11:54:36.344002 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8" Nov 26 11:54:36 crc kubenswrapper[4622]: I1126 11:54:36.344372 4622 scope.go:117] "RemoveContainer" containerID="0107c566aa7d6be33c14f41f7faa1e20340ea506a28be29fc7782b135ad99c86" Nov 26 11:54:36 crc kubenswrapper[4622]: I1126 11:54:36.382675 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8"] Nov 26 11:54:36 crc kubenswrapper[4622]: I1126 11:54:36.389687 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-544fb75865-ztbk8"] Nov 26 11:54:36 crc kubenswrapper[4622]: I1126 11:54:36.739070 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d29c5844-9fc3-488b-a40e-7846eea50619" path="/var/lib/kubelet/pods/d29c5844-9fc3-488b-a40e-7846eea50619/volumes" Nov 26 11:54:45 crc kubenswrapper[4622]: I1126 11:54:45.198783 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:54:45 crc kubenswrapper[4622]: I1126 11:54:45.199427 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:55:11 crc kubenswrapper[4622]: I1126 11:55:11.325837 4622 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs"] Nov 26 11:55:11 crc kubenswrapper[4622]: E1126 11:55:11.327419 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d29c5844-9fc3-488b-a40e-7846eea50619" containerName="operator" Nov 26 11:55:11 crc kubenswrapper[4622]: I1126 11:55:11.327440 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="d29c5844-9fc3-488b-a40e-7846eea50619" containerName="operator" Nov 26 11:55:11 crc kubenswrapper[4622]: I1126 11:55:11.328653 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="d29c5844-9fc3-488b-a40e-7846eea50619" containerName="operator" Nov 26 11:55:11 crc kubenswrapper[4622]: I1126 11:55:11.332327 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" Nov 26 11:55:11 crc kubenswrapper[4622]: I1126 11:55:11.367804 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs"] Nov 26 11:55:11 crc kubenswrapper[4622]: I1126 11:55:11.412783 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p446s\" (UniqueName: \"kubernetes.io/projected/3b8a350c-99de-4c01-ae25-cf10b5247f9a-kube-api-access-p446s\") pod \"test-operator-controller-manager-76b59d6455-m8xbs\" (UID: \"3b8a350c-99de-4c01-ae25-cf10b5247f9a\") " pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" Nov 26 11:55:11 crc kubenswrapper[4622]: I1126 11:55:11.515718 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p446s\" (UniqueName: \"kubernetes.io/projected/3b8a350c-99de-4c01-ae25-cf10b5247f9a-kube-api-access-p446s\") pod \"test-operator-controller-manager-76b59d6455-m8xbs\" (UID: \"3b8a350c-99de-4c01-ae25-cf10b5247f9a\") " pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" Nov 26 11:55:11 crc kubenswrapper[4622]: I1126 11:55:11.538882 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p446s\" (UniqueName: \"kubernetes.io/projected/3b8a350c-99de-4c01-ae25-cf10b5247f9a-kube-api-access-p446s\") pod \"test-operator-controller-manager-76b59d6455-m8xbs\" (UID: \"3b8a350c-99de-4c01-ae25-cf10b5247f9a\") " pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" Nov 26 11:55:11 crc kubenswrapper[4622]: I1126 11:55:11.683308 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" Nov 26 11:55:12 crc kubenswrapper[4622]: I1126 11:55:12.111929 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs"] Nov 26 11:55:12 crc kubenswrapper[4622]: I1126 11:55:12.702378 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" event={"ID":"3b8a350c-99de-4c01-ae25-cf10b5247f9a","Type":"ContainerStarted","Data":"c9b38e22f4a8e69a7d176637631d3e64f0decfe8039421e2b0869cb344c5cf39"} Nov 26 11:55:15 crc kubenswrapper[4622]: I1126 11:55:15.198567 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 11:55:15 crc kubenswrapper[4622]: I1126 11:55:15.199024 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 11:55:15 crc kubenswrapper[4622]: I1126 11:55:15.199095 4622 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 11:55:15 crc kubenswrapper[4622]: I1126 11:55:15.200385 4622 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5962ee80bea408a494d1df1a7b2f224fb827333c03df82cf77066082ed5701b6"} pod="openshift-machine-config-operator/machine-config-daemon-k565w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 26 11:55:15 crc kubenswrapper[4622]: I1126 11:55:15.200454 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" containerID="cri-o://5962ee80bea408a494d1df1a7b2f224fb827333c03df82cf77066082ed5701b6" gracePeriod=600 Nov 26 11:55:15 crc kubenswrapper[4622]: I1126 11:55:15.738759 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerID="5962ee80bea408a494d1df1a7b2f224fb827333c03df82cf77066082ed5701b6" exitCode=0 Nov 26 11:55:15 crc kubenswrapper[4622]: I1126 11:55:15.738823 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerDied","Data":"5962ee80bea408a494d1df1a7b2f224fb827333c03df82cf77066082ed5701b6"} Nov 26 11:55:15 crc kubenswrapper[4622]: I1126 11:55:15.739195 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515"} Nov 26 11:55:15 crc kubenswrapper[4622]: I1126 11:55:15.739230 4622 scope.go:117] "RemoveContainer" containerID="437d23691f6217c3a6c865d32464ea8c883dbfd4e40bb17850c0fd75b5e30be5" Nov 26 11:57:11 crc 
kubenswrapper[4622]: I1126 11:57:11.018488 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-29fdb"] Nov 26 11:57:11 crc kubenswrapper[4622]: I1126 11:57:11.020831 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-29fdb" Nov 26 11:57:11 crc kubenswrapper[4622]: I1126 11:57:11.046527 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-29fdb"] Nov 26 11:57:11 crc kubenswrapper[4622]: I1126 11:57:11.063266 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pcvm\" (UniqueName: \"kubernetes.io/projected/c9a85553-3c35-4e98-88c3-6e214bd0a672-kube-api-access-4pcvm\") pod \"redhat-marketplace-29fdb\" (UID: \"c9a85553-3c35-4e98-88c3-6e214bd0a672\") " pod="openshift-marketplace/redhat-marketplace-29fdb" Nov 26 11:57:11 crc kubenswrapper[4622]: I1126 11:57:11.063549 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9a85553-3c35-4e98-88c3-6e214bd0a672-catalog-content\") pod \"redhat-marketplace-29fdb\" (UID: \"c9a85553-3c35-4e98-88c3-6e214bd0a672\") " pod="openshift-marketplace/redhat-marketplace-29fdb" Nov 26 11:57:11 crc kubenswrapper[4622]: I1126 11:57:11.063814 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9a85553-3c35-4e98-88c3-6e214bd0a672-utilities\") pod \"redhat-marketplace-29fdb\" (UID: \"c9a85553-3c35-4e98-88c3-6e214bd0a672\") " pod="openshift-marketplace/redhat-marketplace-29fdb" Nov 26 11:57:11 crc kubenswrapper[4622]: I1126 11:57:11.165740 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9a85553-3c35-4e98-88c3-6e214bd0a672-catalog-content\") pod \"redhat-marketplace-29fdb\" (UID: \"c9a85553-3c35-4e98-88c3-6e214bd0a672\") " pod="openshift-marketplace/redhat-marketplace-29fdb" Nov 26 11:57:11 crc kubenswrapper[4622]: I1126 11:57:11.165801 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9a85553-3c35-4e98-88c3-6e214bd0a672-utilities\") pod \"redhat-marketplace-29fdb\" (UID: \"c9a85553-3c35-4e98-88c3-6e214bd0a672\") " pod="openshift-marketplace/redhat-marketplace-29fdb" Nov 26 11:57:11 crc kubenswrapper[4622]: I1126 11:57:11.165836 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pcvm\" (UniqueName: \"kubernetes.io/projected/c9a85553-3c35-4e98-88c3-6e214bd0a672-kube-api-access-4pcvm\") pod \"redhat-marketplace-29fdb\" (UID: \"c9a85553-3c35-4e98-88c3-6e214bd0a672\") " pod="openshift-marketplace/redhat-marketplace-29fdb" Nov 26 11:57:11 crc kubenswrapper[4622]: I1126 11:57:11.166314 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9a85553-3c35-4e98-88c3-6e214bd0a672-catalog-content\") pod \"redhat-marketplace-29fdb\" (UID: \"c9a85553-3c35-4e98-88c3-6e214bd0a672\") " pod="openshift-marketplace/redhat-marketplace-29fdb" Nov 26 11:57:11 crc kubenswrapper[4622]: I1126 11:57:11.166388 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/c9a85553-3c35-4e98-88c3-6e214bd0a672-utilities\") pod \"redhat-marketplace-29fdb\" (UID: \"c9a85553-3c35-4e98-88c3-6e214bd0a672\") " pod="openshift-marketplace/redhat-marketplace-29fdb" Nov 26 11:57:11 crc kubenswrapper[4622]: I1126 11:57:11.183615 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pcvm\" (UniqueName: \"kubernetes.io/projected/c9a85553-3c35-4e98-88c3-6e214bd0a672-kube-api-access-4pcvm\") pod \"redhat-marketplace-29fdb\" (UID: \"c9a85553-3c35-4e98-88c3-6e214bd0a672\") " pod="openshift-marketplace/redhat-marketplace-29fdb" Nov 26 11:57:11 crc kubenswrapper[4622]: I1126 11:57:11.337409 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-29fdb" Nov 26 11:57:11 crc kubenswrapper[4622]: I1126 11:57:11.750515 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-29fdb"] Nov 26 11:57:11 crc kubenswrapper[4622]: I1126 11:57:11.838400 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-29fdb" event={"ID":"c9a85553-3c35-4e98-88c3-6e214bd0a672","Type":"ContainerStarted","Data":"6cb80484b9e039e732f117fa53f4d41882a634777f3859d695f517e38f80eaa8"} Nov 26 11:57:12 crc kubenswrapper[4622]: E1126 11:57:12.133347 4622 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \"http://38.102.83.113:5001/v2/\": dial tcp 38.102.83.113:5001: i/o timeout" image="38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1" Nov 26 11:57:12 crc kubenswrapper[4622]: E1126 11:57:12.133398 4622 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \"http://38.102.83.113:5001/v2/\": dial tcp 38.102.83.113:5001: i/o timeout" image="38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1" Nov 26 11:57:12 crc kubenswrapper[4622]: E1126 11:57:12.133663 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p446s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-76b59d6455-m8xbs_openstack-operators(3b8a350c-99de-4c01-ae25-cf10b5247f9a): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \"http://38.102.83.113:5001/v2/\": dial tcp 38.102.83.113:5001: i/o timeout" logger="UnhandledError"
Nov 26 11:57:12 crc kubenswrapper[4622]: E1126 11:57:12.307983 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \\\"http://38.102.83.113:5001/v2/\\\": dial tcp 38.102.83.113:5001: i/o timeout\"" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" podUID="3b8a350c-99de-4c01-ae25-cf10b5247f9a"
Nov 26 11:57:12 crc kubenswrapper[4622]: I1126 11:57:12.850554 4622 generic.go:334] "Generic (PLEG): container finished" podID="c9a85553-3c35-4e98-88c3-6e214bd0a672" containerID="1bb92da098b76c3fab01824245a4500105e9b23be40abdc4c18cc204c3716084" exitCode=0
Nov 26 11:57:12 crc kubenswrapper[4622]: I1126 11:57:12.850654 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-29fdb" event={"ID":"c9a85553-3c35-4e98-88c3-6e214bd0a672","Type":"ContainerDied","Data":"1bb92da098b76c3fab01824245a4500105e9b23be40abdc4c18cc204c3716084"}
Nov 26 11:57:12 crc kubenswrapper[4622]: I1126 11:57:12.853393 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" event={"ID":"3b8a350c-99de-4c01-ae25-cf10b5247f9a","Type":"ContainerStarted","Data":"225d35a2710fa5eda0efb6f5d01274789990600332ee8a36e61a089d463307e7"}
Nov 26 11:57:12 crc kubenswrapper[4622]: E1126 11:57:12.855836 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1\\\"\"" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" podUID="3b8a350c-99de-4c01-ae25-cf10b5247f9a"
Nov 26 11:57:13 crc kubenswrapper[4622]: E1126 11:57:13.863580 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1\\\"\"" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" podUID="3b8a350c-99de-4c01-ae25-cf10b5247f9a"
Nov 26 11:57:14 crc kubenswrapper[4622]: I1126 11:57:14.871297 4622 generic.go:334] "Generic (PLEG): container finished" podID="c9a85553-3c35-4e98-88c3-6e214bd0a672" containerID="cc1c9064b275cb43262196a4c2589bd21fe37f59877de6983459f2c78c374b98" exitCode=0
Nov 26 11:57:14 crc kubenswrapper[4622]: I1126 11:57:14.871348 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-29fdb" event={"ID":"c9a85553-3c35-4e98-88c3-6e214bd0a672","Type":"ContainerDied","Data":"cc1c9064b275cb43262196a4c2589bd21fe37f59877de6983459f2c78c374b98"}
Nov 26 11:57:15 crc kubenswrapper[4622]: I1126 11:57:15.199623 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 26 11:57:15 crc kubenswrapper[4622]: I1126 11:57:15.199702 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 26 11:57:15 crc kubenswrapper[4622]: I1126 11:57:15.884804 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-29fdb" event={"ID":"c9a85553-3c35-4e98-88c3-6e214bd0a672","Type":"ContainerStarted","Data":"94c057e543ebafcc18891dc9e8f40c69665b28415b85cf217307cc5067afec74"}
Nov 26 11:57:15 crc kubenswrapper[4622]: I1126 11:57:15.907184 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-29fdb" podStartSLOduration=3.123074494 podStartE2EDuration="5.907163435s" podCreationTimestamp="2025-11-26 11:57:10 +0000 UTC" firstStartedPulling="2025-11-26 11:57:12.852466651 +0000 UTC m=+2792.443678174" lastFinishedPulling="2025-11-26 11:57:15.636555593 +0000 UTC m=+2795.227767115" observedRunningTime="2025-11-26 11:57:15.901540867 +0000 UTC m=+2795.492752389" watchObservedRunningTime="2025-11-26 11:57:15.907163435 +0000 UTC m=+2795.498374957"
Nov 26 11:57:21 crc kubenswrapper[4622]: I1126 11:57:21.337793 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-29fdb"
Nov 26 11:57:21 crc kubenswrapper[4622]: I1126 11:57:21.338539 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-29fdb"
Nov 26 11:57:21 crc kubenswrapper[4622]: I1126 11:57:21.382317 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-29fdb"
Nov 26 11:57:21 crc kubenswrapper[4622]: I1126 11:57:21.986941 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-29fdb"
Nov 26 11:57:22 crc kubenswrapper[4622]: I1126 11:57:22.046103 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-29fdb"]
Nov 26 11:57:23 crc kubenswrapper[4622]: I1126 11:57:23.964137 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-29fdb" podUID="c9a85553-3c35-4e98-88c3-6e214bd0a672" containerName="registry-server" containerID="cri-o://94c057e543ebafcc18891dc9e8f40c69665b28415b85cf217307cc5067afec74" gracePeriod=2
Nov 26 11:57:24 crc kubenswrapper[4622]: I1126 11:57:24.368208 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-29fdb"
Nov 26 11:57:24 crc kubenswrapper[4622]: I1126 11:57:24.539898 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pcvm\" (UniqueName: \"kubernetes.io/projected/c9a85553-3c35-4e98-88c3-6e214bd0a672-kube-api-access-4pcvm\") pod \"c9a85553-3c35-4e98-88c3-6e214bd0a672\" (UID: \"c9a85553-3c35-4e98-88c3-6e214bd0a672\") "
Nov 26 11:57:24 crc kubenswrapper[4622]: I1126 11:57:24.540380 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9a85553-3c35-4e98-88c3-6e214bd0a672-catalog-content\") pod \"c9a85553-3c35-4e98-88c3-6e214bd0a672\" (UID: \"c9a85553-3c35-4e98-88c3-6e214bd0a672\") "
Nov 26 11:57:24 crc kubenswrapper[4622]: I1126 11:57:24.540486 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9a85553-3c35-4e98-88c3-6e214bd0a672-utilities\") pod \"c9a85553-3c35-4e98-88c3-6e214bd0a672\" (UID: \"c9a85553-3c35-4e98-88c3-6e214bd0a672\") "
Nov 26 11:57:24 crc kubenswrapper[4622]: I1126 11:57:24.541397 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9a85553-3c35-4e98-88c3-6e214bd0a672-utilities" (OuterVolumeSpecName: "utilities") pod "c9a85553-3c35-4e98-88c3-6e214bd0a672" (UID: "c9a85553-3c35-4e98-88c3-6e214bd0a672"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:57:24 crc kubenswrapper[4622]: I1126 11:57:24.546833 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9a85553-3c35-4e98-88c3-6e214bd0a672-kube-api-access-4pcvm" (OuterVolumeSpecName: "kube-api-access-4pcvm") pod "c9a85553-3c35-4e98-88c3-6e214bd0a672" (UID: "c9a85553-3c35-4e98-88c3-6e214bd0a672"). InnerVolumeSpecName "kube-api-access-4pcvm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:57:24 crc kubenswrapper[4622]: I1126 11:57:24.553485 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9a85553-3c35-4e98-88c3-6e214bd0a672-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c9a85553-3c35-4e98-88c3-6e214bd0a672" (UID: "c9a85553-3c35-4e98-88c3-6e214bd0a672"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:57:24 crc kubenswrapper[4622]: I1126 11:57:24.643119 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pcvm\" (UniqueName: \"kubernetes.io/projected/c9a85553-3c35-4e98-88c3-6e214bd0a672-kube-api-access-4pcvm\") on node \"crc\" DevicePath \"\""
Nov 26 11:57:24 crc kubenswrapper[4622]: I1126 11:57:24.643150 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9a85553-3c35-4e98-88c3-6e214bd0a672-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 26 11:57:24 crc kubenswrapper[4622]: I1126 11:57:24.643161 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9a85553-3c35-4e98-88c3-6e214bd0a672-utilities\") on node \"crc\" DevicePath \"\""
Nov 26 11:57:24 crc kubenswrapper[4622]: I1126 11:57:24.975225 4622 generic.go:334] "Generic (PLEG): container finished" podID="c9a85553-3c35-4e98-88c3-6e214bd0a672" containerID="94c057e543ebafcc18891dc9e8f40c69665b28415b85cf217307cc5067afec74" exitCode=0
Nov 26 11:57:24 crc kubenswrapper[4622]: I1126 11:57:24.975314 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-29fdb"
Nov 26 11:57:24 crc kubenswrapper[4622]: I1126 11:57:24.975326 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-29fdb" event={"ID":"c9a85553-3c35-4e98-88c3-6e214bd0a672","Type":"ContainerDied","Data":"94c057e543ebafcc18891dc9e8f40c69665b28415b85cf217307cc5067afec74"}
Nov 26 11:57:24 crc kubenswrapper[4622]: I1126 11:57:24.975696 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-29fdb" event={"ID":"c9a85553-3c35-4e98-88c3-6e214bd0a672","Type":"ContainerDied","Data":"6cb80484b9e039e732f117fa53f4d41882a634777f3859d695f517e38f80eaa8"}
Nov 26 11:57:24 crc kubenswrapper[4622]: I1126 11:57:24.975725 4622 scope.go:117] "RemoveContainer" containerID="94c057e543ebafcc18891dc9e8f40c69665b28415b85cf217307cc5067afec74"
Nov 26 11:57:25 crc kubenswrapper[4622]: I1126 11:57:25.004674 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-29fdb"]
Nov 26 11:57:25 crc kubenswrapper[4622]: I1126 11:57:25.008028 4622 scope.go:117] "RemoveContainer" containerID="cc1c9064b275cb43262196a4c2589bd21fe37f59877de6983459f2c78c374b98"
Nov 26 11:57:25 crc kubenswrapper[4622]: I1126 11:57:25.017756 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-29fdb"]
Nov 26 11:57:25 crc kubenswrapper[4622]: I1126 11:57:25.039523 4622 scope.go:117] "RemoveContainer" containerID="1bb92da098b76c3fab01824245a4500105e9b23be40abdc4c18cc204c3716084"
Nov 26 11:57:25 crc kubenswrapper[4622]: I1126 11:57:25.063433 4622 scope.go:117] "RemoveContainer" containerID="94c057e543ebafcc18891dc9e8f40c69665b28415b85cf217307cc5067afec74"
Nov 26 11:57:25 crc kubenswrapper[4622]: E1126 11:57:25.063775 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94c057e543ebafcc18891dc9e8f40c69665b28415b85cf217307cc5067afec74\": container with ID starting with 94c057e543ebafcc18891dc9e8f40c69665b28415b85cf217307cc5067afec74 not found: ID does not exist" containerID="94c057e543ebafcc18891dc9e8f40c69665b28415b85cf217307cc5067afec74"
Nov 26 11:57:25 crc kubenswrapper[4622]: I1126 11:57:25.063805 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94c057e543ebafcc18891dc9e8f40c69665b28415b85cf217307cc5067afec74"} err="failed to get container status \"94c057e543ebafcc18891dc9e8f40c69665b28415b85cf217307cc5067afec74\": rpc error: code = NotFound desc = could not find container \"94c057e543ebafcc18891dc9e8f40c69665b28415b85cf217307cc5067afec74\": container with ID starting with 94c057e543ebafcc18891dc9e8f40c69665b28415b85cf217307cc5067afec74 not found: ID does not exist"
Nov 26 11:57:25 crc kubenswrapper[4622]: I1126 11:57:25.063827 4622 scope.go:117] "RemoveContainer" containerID="cc1c9064b275cb43262196a4c2589bd21fe37f59877de6983459f2c78c374b98"
Nov 26 11:57:25 crc kubenswrapper[4622]: E1126 11:57:25.064093 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc1c9064b275cb43262196a4c2589bd21fe37f59877de6983459f2c78c374b98\": container with ID starting with cc1c9064b275cb43262196a4c2589bd21fe37f59877de6983459f2c78c374b98 not found: ID does not exist" containerID="cc1c9064b275cb43262196a4c2589bd21fe37f59877de6983459f2c78c374b98"
Nov 26 11:57:25 crc kubenswrapper[4622]: I1126 11:57:25.064114 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc1c9064b275cb43262196a4c2589bd21fe37f59877de6983459f2c78c374b98"} err="failed to get container status \"cc1c9064b275cb43262196a4c2589bd21fe37f59877de6983459f2c78c374b98\": rpc error: code = NotFound desc = could not find container \"cc1c9064b275cb43262196a4c2589bd21fe37f59877de6983459f2c78c374b98\": container with ID starting with cc1c9064b275cb43262196a4c2589bd21fe37f59877de6983459f2c78c374b98 not found: ID does not exist"
Nov 26 11:57:25 crc kubenswrapper[4622]: I1126 11:57:25.064131 4622 scope.go:117] "RemoveContainer" containerID="1bb92da098b76c3fab01824245a4500105e9b23be40abdc4c18cc204c3716084"
Nov 26 11:57:25 crc kubenswrapper[4622]: E1126 11:57:25.064336 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bb92da098b76c3fab01824245a4500105e9b23be40abdc4c18cc204c3716084\": container with ID starting with 1bb92da098b76c3fab01824245a4500105e9b23be40abdc4c18cc204c3716084 not found: ID does not exist" containerID="1bb92da098b76c3fab01824245a4500105e9b23be40abdc4c18cc204c3716084"
Nov 26 11:57:25 crc kubenswrapper[4622]: I1126 11:57:25.064358 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bb92da098b76c3fab01824245a4500105e9b23be40abdc4c18cc204c3716084"} err="failed to get container status \"1bb92da098b76c3fab01824245a4500105e9b23be40abdc4c18cc204c3716084\": rpc error: code = NotFound desc = could not find container \"1bb92da098b76c3fab01824245a4500105e9b23be40abdc4c18cc204c3716084\": container with ID starting with 1bb92da098b76c3fab01824245a4500105e9b23be40abdc4c18cc204c3716084 not found: ID does not exist"
Nov 26 11:57:26 crc kubenswrapper[4622]: I1126 11:57:26.716389 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9a85553-3c35-4e98-88c3-6e214bd0a672" path="/var/lib/kubelet/pods/c9a85553-3c35-4e98-88c3-6e214bd0a672/volumes"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.206439 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rbq8z"]
Nov 26 11:57:44 crc kubenswrapper[4622]: E1126 11:57:44.207424 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9a85553-3c35-4e98-88c3-6e214bd0a672" containerName="extract-utilities"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.207440 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9a85553-3c35-4e98-88c3-6e214bd0a672" containerName="extract-utilities"
Nov 26 11:57:44 crc kubenswrapper[4622]: E1126 11:57:44.207455 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9a85553-3c35-4e98-88c3-6e214bd0a672" containerName="registry-server"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.207460 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9a85553-3c35-4e98-88c3-6e214bd0a672" containerName="registry-server"
Nov 26 11:57:44 crc kubenswrapper[4622]: E1126 11:57:44.207488 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9a85553-3c35-4e98-88c3-6e214bd0a672" containerName="extract-content"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.207494 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9a85553-3c35-4e98-88c3-6e214bd0a672" containerName="extract-content"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.207748 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9a85553-3c35-4e98-88c3-6e214bd0a672" containerName="registry-server"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.209103 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.217297 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rbq8z"]
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.340757 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqz8j\" (UniqueName: \"kubernetes.io/projected/ac509298-d27e-4311-a381-fbe8df03c821-kube-api-access-gqz8j\") pod \"certified-operators-rbq8z\" (UID: \"ac509298-d27e-4311-a381-fbe8df03c821\") " pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.341087 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac509298-d27e-4311-a381-fbe8df03c821-catalog-content\") pod \"certified-operators-rbq8z\" (UID: \"ac509298-d27e-4311-a381-fbe8df03c821\") " pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.341305 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac509298-d27e-4311-a381-fbe8df03c821-utilities\") pod \"certified-operators-rbq8z\" (UID: \"ac509298-d27e-4311-a381-fbe8df03c821\") " pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.444028 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac509298-d27e-4311-a381-fbe8df03c821-catalog-content\") pod \"certified-operators-rbq8z\" (UID: \"ac509298-d27e-4311-a381-fbe8df03c821\") " pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.444138 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac509298-d27e-4311-a381-fbe8df03c821-utilities\") pod \"certified-operators-rbq8z\" (UID: \"ac509298-d27e-4311-a381-fbe8df03c821\") " pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.444287 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqz8j\" (UniqueName: \"kubernetes.io/projected/ac509298-d27e-4311-a381-fbe8df03c821-kube-api-access-gqz8j\") pod \"certified-operators-rbq8z\" (UID: \"ac509298-d27e-4311-a381-fbe8df03c821\") " pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.444550 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac509298-d27e-4311-a381-fbe8df03c821-catalog-content\") pod \"certified-operators-rbq8z\" (UID: \"ac509298-d27e-4311-a381-fbe8df03c821\") " pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.444605 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac509298-d27e-4311-a381-fbe8df03c821-utilities\") pod \"certified-operators-rbq8z\" (UID: \"ac509298-d27e-4311-a381-fbe8df03c821\") " pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.474543 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqz8j\" (UniqueName: \"kubernetes.io/projected/ac509298-d27e-4311-a381-fbe8df03c821-kube-api-access-gqz8j\") pod \"certified-operators-rbq8z\" (UID: \"ac509298-d27e-4311-a381-fbe8df03c821\") " pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.534902 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:44 crc kubenswrapper[4622]: I1126 11:57:44.988434 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rbq8z"]
Nov 26 11:57:45 crc kubenswrapper[4622]: I1126 11:57:45.189165 4622 generic.go:334] "Generic (PLEG): container finished" podID="ac509298-d27e-4311-a381-fbe8df03c821" containerID="5d0424569e9572b2c04e67547cb3dba05005945ce47f64aea91587e835dc4cf8" exitCode=0
Nov 26 11:57:45 crc kubenswrapper[4622]: I1126 11:57:45.189227 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbq8z" event={"ID":"ac509298-d27e-4311-a381-fbe8df03c821","Type":"ContainerDied","Data":"5d0424569e9572b2c04e67547cb3dba05005945ce47f64aea91587e835dc4cf8"}
Nov 26 11:57:45 crc kubenswrapper[4622]: I1126 11:57:45.189265 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbq8z" event={"ID":"ac509298-d27e-4311-a381-fbe8df03c821","Type":"ContainerStarted","Data":"9de4b2e265d0ce4b55142f69a81699fdf8f6e688f30d3a1362a5bfb937e6f0f9"}
Nov 26 11:57:45 crc kubenswrapper[4622]: I1126 11:57:45.193880 4622 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 26 11:57:45 crc kubenswrapper[4622]: I1126 11:57:45.198866 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 26 11:57:45 crc kubenswrapper[4622]: I1126 11:57:45.198946 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 26 11:57:47 crc kubenswrapper[4622]: I1126 11:57:47.212970 4622 generic.go:334] "Generic (PLEG): container finished" podID="ac509298-d27e-4311-a381-fbe8df03c821" containerID="861f63a80a6c5202a8542d88f00a47e01f326a60293e2c0e7ba6daca7113a97d" exitCode=0
Nov 26 11:57:47 crc kubenswrapper[4622]: I1126 11:57:47.213269 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbq8z" event={"ID":"ac509298-d27e-4311-a381-fbe8df03c821","Type":"ContainerDied","Data":"861f63a80a6c5202a8542d88f00a47e01f326a60293e2c0e7ba6daca7113a97d"}
Nov 26 11:57:49 crc kubenswrapper[4622]: I1126 11:57:49.246099 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbq8z" event={"ID":"ac509298-d27e-4311-a381-fbe8df03c821","Type":"ContainerStarted","Data":"d669e40046f049b9a9598c8c130bba7751e55cbe7b8b7ffd1f1a02c2212d010f"}
Nov 26 11:57:49 crc kubenswrapper[4622]: I1126 11:57:49.278656 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rbq8z" podStartSLOduration=2.358837979 podStartE2EDuration="5.278632147s" podCreationTimestamp="2025-11-26 11:57:44 +0000 UTC" firstStartedPulling="2025-11-26 11:57:45.193549982 +0000 UTC m=+2824.784761504" lastFinishedPulling="2025-11-26 11:57:48.113344151 +0000 UTC m=+2827.704555672" observedRunningTime="2025-11-26 11:57:49.266318698 +0000 UTC m=+2828.857530210" watchObservedRunningTime="2025-11-26 11:57:49.278632147 +0000 UTC m=+2828.869843669"
Nov 26 11:57:54 crc kubenswrapper[4622]: I1126 11:57:54.535779 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:54 crc kubenswrapper[4622]: I1126 11:57:54.537667 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:54 crc kubenswrapper[4622]: I1126 11:57:54.580817 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:55 crc kubenswrapper[4622]: I1126 11:57:55.335329 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:55 crc kubenswrapper[4622]: I1126 11:57:55.374360 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rbq8z"]
Nov 26 11:57:57 crc kubenswrapper[4622]: I1126 11:57:57.320835 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rbq8z" podUID="ac509298-d27e-4311-a381-fbe8df03c821" containerName="registry-server" containerID="cri-o://d669e40046f049b9a9598c8c130bba7751e55cbe7b8b7ffd1f1a02c2212d010f" gracePeriod=2
Nov 26 11:57:57 crc kubenswrapper[4622]: I1126 11:57:57.730653 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:57 crc kubenswrapper[4622]: I1126 11:57:57.867665 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac509298-d27e-4311-a381-fbe8df03c821-utilities\") pod \"ac509298-d27e-4311-a381-fbe8df03c821\" (UID: \"ac509298-d27e-4311-a381-fbe8df03c821\") "
Nov 26 11:57:57 crc kubenswrapper[4622]: I1126 11:57:57.869693 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac509298-d27e-4311-a381-fbe8df03c821-utilities" (OuterVolumeSpecName: "utilities") pod "ac509298-d27e-4311-a381-fbe8df03c821" (UID: "ac509298-d27e-4311-a381-fbe8df03c821"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:57:57 crc kubenswrapper[4622]: I1126 11:57:57.872419 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqz8j\" (UniqueName: \"kubernetes.io/projected/ac509298-d27e-4311-a381-fbe8df03c821-kube-api-access-gqz8j\") pod \"ac509298-d27e-4311-a381-fbe8df03c821\" (UID: \"ac509298-d27e-4311-a381-fbe8df03c821\") "
Nov 26 11:57:57 crc kubenswrapper[4622]: I1126 11:57:57.872591 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac509298-d27e-4311-a381-fbe8df03c821-catalog-content\") pod \"ac509298-d27e-4311-a381-fbe8df03c821\" (UID: \"ac509298-d27e-4311-a381-fbe8df03c821\") "
Nov 26 11:57:57 crc kubenswrapper[4622]: I1126 11:57:57.876382 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac509298-d27e-4311-a381-fbe8df03c821-utilities\") on node \"crc\" DevicePath \"\""
Nov 26 11:57:57 crc kubenswrapper[4622]: I1126 11:57:57.891822 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac509298-d27e-4311-a381-fbe8df03c821-kube-api-access-gqz8j" (OuterVolumeSpecName: "kube-api-access-gqz8j") pod "ac509298-d27e-4311-a381-fbe8df03c821" (UID: "ac509298-d27e-4311-a381-fbe8df03c821"). InnerVolumeSpecName "kube-api-access-gqz8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:57:57 crc kubenswrapper[4622]: I1126 11:57:57.918953 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac509298-d27e-4311-a381-fbe8df03c821-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac509298-d27e-4311-a381-fbe8df03c821" (UID: "ac509298-d27e-4311-a381-fbe8df03c821"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:57:57 crc kubenswrapper[4622]: I1126 11:57:57.978065 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac509298-d27e-4311-a381-fbe8df03c821-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 26 11:57:57 crc kubenswrapper[4622]: I1126 11:57:57.978096 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqz8j\" (UniqueName: \"kubernetes.io/projected/ac509298-d27e-4311-a381-fbe8df03c821-kube-api-access-gqz8j\") on node \"crc\" DevicePath \"\""
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.333414 4622 generic.go:334] "Generic (PLEG): container finished" podID="ac509298-d27e-4311-a381-fbe8df03c821" containerID="d669e40046f049b9a9598c8c130bba7751e55cbe7b8b7ffd1f1a02c2212d010f" exitCode=0
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.333463 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbq8z" event={"ID":"ac509298-d27e-4311-a381-fbe8df03c821","Type":"ContainerDied","Data":"d669e40046f049b9a9598c8c130bba7751e55cbe7b8b7ffd1f1a02c2212d010f"}
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.333522 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rbq8z"
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.333567 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbq8z" event={"ID":"ac509298-d27e-4311-a381-fbe8df03c821","Type":"ContainerDied","Data":"9de4b2e265d0ce4b55142f69a81699fdf8f6e688f30d3a1362a5bfb937e6f0f9"}
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.333597 4622 scope.go:117] "RemoveContainer" containerID="d669e40046f049b9a9598c8c130bba7751e55cbe7b8b7ffd1f1a02c2212d010f"
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.361532 4622 scope.go:117] "RemoveContainer" containerID="861f63a80a6c5202a8542d88f00a47e01f326a60293e2c0e7ba6daca7113a97d"
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.368783 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rbq8z"]
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.375320 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rbq8z"]
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.389652 4622 scope.go:117] "RemoveContainer" containerID="5d0424569e9572b2c04e67547cb3dba05005945ce47f64aea91587e835dc4cf8"
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.410066 4622 scope.go:117] "RemoveContainer" containerID="d669e40046f049b9a9598c8c130bba7751e55cbe7b8b7ffd1f1a02c2212d010f"
Nov 26 11:57:58 crc kubenswrapper[4622]: E1126 11:57:58.410390 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d669e40046f049b9a9598c8c130bba7751e55cbe7b8b7ffd1f1a02c2212d010f\": container with ID starting with d669e40046f049b9a9598c8c130bba7751e55cbe7b8b7ffd1f1a02c2212d010f not found: ID does not exist" containerID="d669e40046f049b9a9598c8c130bba7751e55cbe7b8b7ffd1f1a02c2212d010f"
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.410433 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d669e40046f049b9a9598c8c130bba7751e55cbe7b8b7ffd1f1a02c2212d010f"} err="failed to get container status \"d669e40046f049b9a9598c8c130bba7751e55cbe7b8b7ffd1f1a02c2212d010f\": rpc error: code = NotFound desc = could not find container \"d669e40046f049b9a9598c8c130bba7751e55cbe7b8b7ffd1f1a02c2212d010f\": container with ID starting with d669e40046f049b9a9598c8c130bba7751e55cbe7b8b7ffd1f1a02c2212d010f not found: ID does not exist"
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.410459 4622 scope.go:117] "RemoveContainer" containerID="861f63a80a6c5202a8542d88f00a47e01f326a60293e2c0e7ba6daca7113a97d"
Nov 26 11:57:58 crc kubenswrapper[4622]: E1126 11:57:58.410809 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"861f63a80a6c5202a8542d88f00a47e01f326a60293e2c0e7ba6daca7113a97d\": container with ID starting with 861f63a80a6c5202a8542d88f00a47e01f326a60293e2c0e7ba6daca7113a97d not found: ID does not exist" containerID="861f63a80a6c5202a8542d88f00a47e01f326a60293e2c0e7ba6daca7113a97d"
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.410840 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"861f63a80a6c5202a8542d88f00a47e01f326a60293e2c0e7ba6daca7113a97d"} err="failed to get container status \"861f63a80a6c5202a8542d88f00a47e01f326a60293e2c0e7ba6daca7113a97d\": rpc error: code = NotFound desc = could not find container \"861f63a80a6c5202a8542d88f00a47e01f326a60293e2c0e7ba6daca7113a97d\": container with ID starting with 861f63a80a6c5202a8542d88f00a47e01f326a60293e2c0e7ba6daca7113a97d not found: ID does not exist"
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.410885 4622 scope.go:117] "RemoveContainer" containerID="5d0424569e9572b2c04e67547cb3dba05005945ce47f64aea91587e835dc4cf8"
Nov 26 11:57:58 crc kubenswrapper[4622]: E1126 11:57:58.411307 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d0424569e9572b2c04e67547cb3dba05005945ce47f64aea91587e835dc4cf8\": container with ID starting with 5d0424569e9572b2c04e67547cb3dba05005945ce47f64aea91587e835dc4cf8 not found: ID does not exist" containerID="5d0424569e9572b2c04e67547cb3dba05005945ce47f64aea91587e835dc4cf8"
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.411356 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d0424569e9572b2c04e67547cb3dba05005945ce47f64aea91587e835dc4cf8"} err="failed to get container status \"5d0424569e9572b2c04e67547cb3dba05005945ce47f64aea91587e835dc4cf8\": rpc error: code = NotFound desc = could not find container \"5d0424569e9572b2c04e67547cb3dba05005945ce47f64aea91587e835dc4cf8\": container with ID starting with 5d0424569e9572b2c04e67547cb3dba05005945ce47f64aea91587e835dc4cf8 not found: ID does not exist"
Nov 26 11:57:58 crc kubenswrapper[4622]: I1126 11:57:58.719896 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac509298-d27e-4311-a381-fbe8df03c821" path="/var/lib/kubelet/pods/ac509298-d27e-4311-a381-fbe8df03c821/volumes"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.130679 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-z5twh"]
Nov 26 11:58:05 crc kubenswrapper[4622]: E1126 11:58:05.131660 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac509298-d27e-4311-a381-fbe8df03c821" containerName="registry-server"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.131677 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac509298-d27e-4311-a381-fbe8df03c821" containerName="registry-server"
Nov 26 11:58:05 crc kubenswrapper[4622]: E1126 11:58:05.131690 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac509298-d27e-4311-a381-fbe8df03c821" containerName="extract-utilities"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.131696 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac509298-d27e-4311-a381-fbe8df03c821" containerName="extract-utilities"
Nov 26 11:58:05 crc kubenswrapper[4622]: E1126 11:58:05.131724 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac509298-d27e-4311-a381-fbe8df03c821" containerName="extract-content"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.131730 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac509298-d27e-4311-a381-fbe8df03c821" containerName="extract-content"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.131962 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac509298-d27e-4311-a381-fbe8df03c821" containerName="registry-server"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.133465 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.156577 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z5twh"]
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.230883 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1bd60a33-30ce-4eef-ad4b-c139920284d4-utilities\") pod \"community-operators-z5twh\" (UID: \"1bd60a33-30ce-4eef-ad4b-c139920284d4\") " pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.231083 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1bd60a33-30ce-4eef-ad4b-c139920284d4-catalog-content\") pod \"community-operators-z5twh\" (UID: \"1bd60a33-30ce-4eef-ad4b-c139920284d4\") " pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.231197 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gxld\" (UniqueName: \"kubernetes.io/projected/1bd60a33-30ce-4eef-ad4b-c139920284d4-kube-api-access-7gxld\") pod \"community-operators-z5twh\" (UID: \"1bd60a33-30ce-4eef-ad4b-c139920284d4\") " pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.333154 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gxld\" (UniqueName: \"kubernetes.io/projected/1bd60a33-30ce-4eef-ad4b-c139920284d4-kube-api-access-7gxld\") pod \"community-operators-z5twh\" (UID: \"1bd60a33-30ce-4eef-ad4b-c139920284d4\") " pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.333524 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1bd60a33-30ce-4eef-ad4b-c139920284d4-utilities\") pod \"community-operators-z5twh\" (UID: \"1bd60a33-30ce-4eef-ad4b-c139920284d4\") " pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.333625 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1bd60a33-30ce-4eef-ad4b-c139920284d4-catalog-content\") pod \"community-operators-z5twh\" (UID: \"1bd60a33-30ce-4eef-ad4b-c139920284d4\") " pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.334162 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1bd60a33-30ce-4eef-ad4b-c139920284d4-utilities\") pod \"community-operators-z5twh\" (UID: \"1bd60a33-30ce-4eef-ad4b-c139920284d4\") " pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.334194 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1bd60a33-30ce-4eef-ad4b-c139920284d4-catalog-content\") pod \"community-operators-z5twh\" (UID: \"1bd60a33-30ce-4eef-ad4b-c139920284d4\") " pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.352724 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gxld\" (UniqueName: \"kubernetes.io/projected/1bd60a33-30ce-4eef-ad4b-c139920284d4-kube-api-access-7gxld\") pod \"community-operators-z5twh\" (UID: \"1bd60a33-30ce-4eef-ad4b-c139920284d4\") " pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.455469 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:05 crc kubenswrapper[4622]: I1126 11:58:05.921509 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z5twh"]
Nov 26 11:58:06 crc kubenswrapper[4622]: I1126 11:58:06.427343 4622 generic.go:334] "Generic (PLEG): container finished" podID="1bd60a33-30ce-4eef-ad4b-c139920284d4" containerID="9713e5b9c2a4e0bae54ed1ac373adcc1b9241763cdda08d7bb53573d7e607197" exitCode=0
Nov 26 11:58:06 crc kubenswrapper[4622]: I1126 11:58:06.427449 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5twh" event={"ID":"1bd60a33-30ce-4eef-ad4b-c139920284d4","Type":"ContainerDied","Data":"9713e5b9c2a4e0bae54ed1ac373adcc1b9241763cdda08d7bb53573d7e607197"}
Nov 26 11:58:06 crc kubenswrapper[4622]: I1126 11:58:06.427789 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5twh" event={"ID":"1bd60a33-30ce-4eef-ad4b-c139920284d4","Type":"ContainerStarted","Data":"95ef83adf26d196463061a2f77a9744a88c0b48d5b0bae54ad4be0d67786c474"}
Nov 26 11:58:07 crc kubenswrapper[4622]: I1126 11:58:07.438852 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5twh" event={"ID":"1bd60a33-30ce-4eef-ad4b-c139920284d4","Type":"ContainerStarted","Data":"9a3e42cb6938d5ab92a5249a49b19a4cf6e6d8d7c1d7424761cf7e9b1d8b591c"}
Nov 26 11:58:08 crc kubenswrapper[4622]: I1126 11:58:08.449954 4622 generic.go:334] "Generic (PLEG): container finished" podID="1bd60a33-30ce-4eef-ad4b-c139920284d4" containerID="9a3e42cb6938d5ab92a5249a49b19a4cf6e6d8d7c1d7424761cf7e9b1d8b591c" exitCode=0
Nov 26 11:58:08 crc kubenswrapper[4622]: I1126 11:58:08.450078 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5twh" event={"ID":"1bd60a33-30ce-4eef-ad4b-c139920284d4","Type":"ContainerDied","Data":"9a3e42cb6938d5ab92a5249a49b19a4cf6e6d8d7c1d7424761cf7e9b1d8b591c"}
Nov 26 11:58:09 crc kubenswrapper[4622]: I1126 11:58:09.463622 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5twh" event={"ID":"1bd60a33-30ce-4eef-ad4b-c139920284d4","Type":"ContainerStarted","Data":"7145e7f31e08ae5587bb30476fbb54da5708a8efc5fd68b6ce29cdb1801b67e7"}
Nov 26 11:58:09 crc kubenswrapper[4622]: I1126 11:58:09.484912 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-z5twh" podStartSLOduration=1.845576723 podStartE2EDuration="4.484893115s" podCreationTimestamp="2025-11-26 11:58:05 +0000 UTC" firstStartedPulling="2025-11-26 11:58:06.431247372 +0000 UTC m=+2846.022458894" lastFinishedPulling="2025-11-26 11:58:09.070563775 +0000 UTC m=+2848.661775286" observedRunningTime="2025-11-26 11:58:09.480408673 +0000 UTC m=+2849.071620195" watchObservedRunningTime="2025-11-26 11:58:09.484893115 +0000 UTC m=+2849.076104637"
Nov 26 11:58:15 crc kubenswrapper[4622]: I1126 11:58:15.198799 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 26 11:58:15 crc kubenswrapper[4622]: I1126 11:58:15.199490 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 26 11:58:15 crc kubenswrapper[4622]: I1126 11:58:15.199570 4622 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k565w"
Nov 26 11:58:15 crc kubenswrapper[4622]: I1126 11:58:15.200687 4622 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515"} pod="openshift-machine-config-operator/machine-config-daemon-k565w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 26 11:58:15 crc kubenswrapper[4622]: I1126 11:58:15.200742 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" containerID="cri-o://ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" gracePeriod=600
Nov 26 11:58:15 crc kubenswrapper[4622]: E1126 11:58:15.335014 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:58:15 crc kubenswrapper[4622]: I1126 11:58:15.456296 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:15 crc kubenswrapper[4622]: I1126 11:58:15.457710 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:15 crc kubenswrapper[4622]: I1126 11:58:15.497286 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:15 crc kubenswrapper[4622]: I1126 11:58:15.521704 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" exitCode=0
Nov 26 11:58:15 crc kubenswrapper[4622]: I1126 11:58:15.521956 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerDied","Data":"ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515"}
Nov 26 11:58:15 crc kubenswrapper[4622]: I1126 11:58:15.522032 4622 scope.go:117] "RemoveContainer" containerID="5962ee80bea408a494d1df1a7b2f224fb827333c03df82cf77066082ed5701b6"
Nov 26 11:58:15 crc kubenswrapper[4622]: I1126 11:58:15.522829 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515"
Nov 26 11:58:15 crc kubenswrapper[4622]: E1126 11:58:15.523770 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:58:15 crc kubenswrapper[4622]: I1126 11:58:15.581536 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:15 crc kubenswrapper[4622]: I1126 11:58:15.737567 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z5twh"]
Nov 26 11:58:17 crc kubenswrapper[4622]: I1126 11:58:17.541044 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-z5twh" podUID="1bd60a33-30ce-4eef-ad4b-c139920284d4" containerName="registry-server" containerID="cri-o://7145e7f31e08ae5587bb30476fbb54da5708a8efc5fd68b6ce29cdb1801b67e7" gracePeriod=2
Nov 26 11:58:17 crc kubenswrapper[4622]: I1126 11:58:17.953278 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.017241 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1bd60a33-30ce-4eef-ad4b-c139920284d4-utilities\") pod \"1bd60a33-30ce-4eef-ad4b-c139920284d4\" (UID: \"1bd60a33-30ce-4eef-ad4b-c139920284d4\") "
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.017312 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gxld\" (UniqueName: \"kubernetes.io/projected/1bd60a33-30ce-4eef-ad4b-c139920284d4-kube-api-access-7gxld\") pod \"1bd60a33-30ce-4eef-ad4b-c139920284d4\" (UID: \"1bd60a33-30ce-4eef-ad4b-c139920284d4\") "
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.017419 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1bd60a33-30ce-4eef-ad4b-c139920284d4-catalog-content\") pod \"1bd60a33-30ce-4eef-ad4b-c139920284d4\" (UID: \"1bd60a33-30ce-4eef-ad4b-c139920284d4\") "
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.018024 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1bd60a33-30ce-4eef-ad4b-c139920284d4-utilities" (OuterVolumeSpecName: "utilities") pod "1bd60a33-30ce-4eef-ad4b-c139920284d4" (UID: "1bd60a33-30ce-4eef-ad4b-c139920284d4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.022550 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bd60a33-30ce-4eef-ad4b-c139920284d4-kube-api-access-7gxld" (OuterVolumeSpecName: "kube-api-access-7gxld") pod "1bd60a33-30ce-4eef-ad4b-c139920284d4" (UID: "1bd60a33-30ce-4eef-ad4b-c139920284d4"). InnerVolumeSpecName "kube-api-access-7gxld". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.069334 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1bd60a33-30ce-4eef-ad4b-c139920284d4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1bd60a33-30ce-4eef-ad4b-c139920284d4" (UID: "1bd60a33-30ce-4eef-ad4b-c139920284d4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.119875 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1bd60a33-30ce-4eef-ad4b-c139920284d4-utilities\") on node \"crc\" DevicePath \"\""
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.119907 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gxld\" (UniqueName: \"kubernetes.io/projected/1bd60a33-30ce-4eef-ad4b-c139920284d4-kube-api-access-7gxld\") on node \"crc\" DevicePath \"\""
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.119921 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1bd60a33-30ce-4eef-ad4b-c139920284d4-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.551453 4622 generic.go:334] "Generic (PLEG): container finished" podID="1bd60a33-30ce-4eef-ad4b-c139920284d4" containerID="7145e7f31e08ae5587bb30476fbb54da5708a8efc5fd68b6ce29cdb1801b67e7" exitCode=0
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.551540 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z5twh"
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.551547 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5twh" event={"ID":"1bd60a33-30ce-4eef-ad4b-c139920284d4","Type":"ContainerDied","Data":"7145e7f31e08ae5587bb30476fbb54da5708a8efc5fd68b6ce29cdb1801b67e7"}
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.552873 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5twh" event={"ID":"1bd60a33-30ce-4eef-ad4b-c139920284d4","Type":"ContainerDied","Data":"95ef83adf26d196463061a2f77a9744a88c0b48d5b0bae54ad4be0d67786c474"}
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.552907 4622 scope.go:117] "RemoveContainer" containerID="7145e7f31e08ae5587bb30476fbb54da5708a8efc5fd68b6ce29cdb1801b67e7"
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.586608 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z5twh"]
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.591359 4622 scope.go:117] "RemoveContainer" containerID="9a3e42cb6938d5ab92a5249a49b19a4cf6e6d8d7c1d7424761cf7e9b1d8b591c"
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.594415 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-z5twh"]
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.613589 4622 scope.go:117] "RemoveContainer" containerID="9713e5b9c2a4e0bae54ed1ac373adcc1b9241763cdda08d7bb53573d7e607197"
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.642622 4622 scope.go:117] "RemoveContainer" containerID="7145e7f31e08ae5587bb30476fbb54da5708a8efc5fd68b6ce29cdb1801b67e7"
Nov 26 11:58:18 crc kubenswrapper[4622]: E1126 11:58:18.642947 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7145e7f31e08ae5587bb30476fbb54da5708a8efc5fd68b6ce29cdb1801b67e7\": container with ID starting with 7145e7f31e08ae5587bb30476fbb54da5708a8efc5fd68b6ce29cdb1801b67e7 not found: ID does not exist" containerID="7145e7f31e08ae5587bb30476fbb54da5708a8efc5fd68b6ce29cdb1801b67e7"
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.642980 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7145e7f31e08ae5587bb30476fbb54da5708a8efc5fd68b6ce29cdb1801b67e7"} err="failed to get container status \"7145e7f31e08ae5587bb30476fbb54da5708a8efc5fd68b6ce29cdb1801b67e7\": rpc error: code = NotFound desc = could not find container \"7145e7f31e08ae5587bb30476fbb54da5708a8efc5fd68b6ce29cdb1801b67e7\": container with ID starting with 7145e7f31e08ae5587bb30476fbb54da5708a8efc5fd68b6ce29cdb1801b67e7 not found: ID does not exist"
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.643012 4622 scope.go:117] "RemoveContainer" containerID="9a3e42cb6938d5ab92a5249a49b19a4cf6e6d8d7c1d7424761cf7e9b1d8b591c"
Nov 26 11:58:18 crc kubenswrapper[4622]: E1126 11:58:18.643313 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a3e42cb6938d5ab92a5249a49b19a4cf6e6d8d7c1d7424761cf7e9b1d8b591c\": container with ID starting with 9a3e42cb6938d5ab92a5249a49b19a4cf6e6d8d7c1d7424761cf7e9b1d8b591c not found: ID does not exist" containerID="9a3e42cb6938d5ab92a5249a49b19a4cf6e6d8d7c1d7424761cf7e9b1d8b591c"
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.643334 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a3e42cb6938d5ab92a5249a49b19a4cf6e6d8d7c1d7424761cf7e9b1d8b591c"} err="failed to get container status \"9a3e42cb6938d5ab92a5249a49b19a4cf6e6d8d7c1d7424761cf7e9b1d8b591c\": rpc error: code = NotFound desc = could not find container \"9a3e42cb6938d5ab92a5249a49b19a4cf6e6d8d7c1d7424761cf7e9b1d8b591c\": container with ID starting with 9a3e42cb6938d5ab92a5249a49b19a4cf6e6d8d7c1d7424761cf7e9b1d8b591c not found: ID does not exist"
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.643351 4622 scope.go:117] "RemoveContainer" containerID="9713e5b9c2a4e0bae54ed1ac373adcc1b9241763cdda08d7bb53573d7e607197"
Nov 26 11:58:18 crc kubenswrapper[4622]: E1126 11:58:18.643592 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9713e5b9c2a4e0bae54ed1ac373adcc1b9241763cdda08d7bb53573d7e607197\": container with ID starting with 9713e5b9c2a4e0bae54ed1ac373adcc1b9241763cdda08d7bb53573d7e607197 not found: ID does not exist" containerID="9713e5b9c2a4e0bae54ed1ac373adcc1b9241763cdda08d7bb53573d7e607197"
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.643613 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9713e5b9c2a4e0bae54ed1ac373adcc1b9241763cdda08d7bb53573d7e607197"} err="failed to get container status \"9713e5b9c2a4e0bae54ed1ac373adcc1b9241763cdda08d7bb53573d7e607197\": rpc error: code = NotFound desc = could not find container \"9713e5b9c2a4e0bae54ed1ac373adcc1b9241763cdda08d7bb53573d7e607197\": container with ID starting with 9713e5b9c2a4e0bae54ed1ac373adcc1b9241763cdda08d7bb53573d7e607197 not found: ID does not exist"
Nov 26 11:58:18 crc kubenswrapper[4622]: I1126 11:58:18.717278 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bd60a33-30ce-4eef-ad4b-c139920284d4" path="/var/lib/kubelet/pods/1bd60a33-30ce-4eef-ad4b-c139920284d4/volumes"
Nov 26 11:58:28 crc kubenswrapper[4622]: I1126 11:58:28.706441 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515"
Nov 26 11:58:28 crc kubenswrapper[4622]: E1126 11:58:28.707212 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:58:36 crc kubenswrapper[4622]: I1126 11:58:36.919494 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-7khvt/must-gather-hbdnk"]
Nov 26 11:58:36 crc kubenswrapper[4622]: E1126 11:58:36.920460 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bd60a33-30ce-4eef-ad4b-c139920284d4" containerName="registry-server"
Nov 26 11:58:36 crc kubenswrapper[4622]: I1126 11:58:36.920477 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bd60a33-30ce-4eef-ad4b-c139920284d4" containerName="registry-server"
Nov 26 11:58:36 crc kubenswrapper[4622]: E1126 11:58:36.922927 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bd60a33-30ce-4eef-ad4b-c139920284d4" containerName="extract-utilities"
Nov 26 11:58:36 crc kubenswrapper[4622]: I1126 11:58:36.922940 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bd60a33-30ce-4eef-ad4b-c139920284d4" containerName="extract-utilities"
Nov 26 11:58:36 crc kubenswrapper[4622]: E1126 11:58:36.922954 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bd60a33-30ce-4eef-ad4b-c139920284d4" containerName="extract-content"
Nov 26 11:58:36 crc kubenswrapper[4622]: I1126 11:58:36.922960 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bd60a33-30ce-4eef-ad4b-c139920284d4" containerName="extract-content"
Nov 26 11:58:36 crc kubenswrapper[4622]: I1126 11:58:36.923241 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bd60a33-30ce-4eef-ad4b-c139920284d4" containerName="registry-server"
Nov 26 11:58:36 crc kubenswrapper[4622]: I1126 11:58:36.924264 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-7khvt/must-gather-hbdnk"
Nov 26 11:58:36 crc kubenswrapper[4622]: I1126 11:58:36.927059 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-7khvt"/"openshift-service-ca.crt"
Nov 26 11:58:36 crc kubenswrapper[4622]: I1126 11:58:36.927280 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-7khvt"/"default-dockercfg-457rt"
Nov 26 11:58:36 crc kubenswrapper[4622]: I1126 11:58:36.927437 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-7khvt"/"kube-root-ca.crt"
Nov 26 11:58:36 crc kubenswrapper[4622]: I1126 11:58:36.946195 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-7khvt/must-gather-hbdnk"]
Nov 26 11:58:37 crc kubenswrapper[4622]: I1126 11:58:37.024540 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5ca03bf9-3a97-4510-bc3f-dd32381bb44d-must-gather-output\") pod \"must-gather-hbdnk\" (UID: \"5ca03bf9-3a97-4510-bc3f-dd32381bb44d\") " pod="openshift-must-gather-7khvt/must-gather-hbdnk"
Nov 26 11:58:37 crc kubenswrapper[4622]: I1126 11:58:37.024700 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4x42w\" (UniqueName: \"kubernetes.io/projected/5ca03bf9-3a97-4510-bc3f-dd32381bb44d-kube-api-access-4x42w\") pod \"must-gather-hbdnk\" (UID: \"5ca03bf9-3a97-4510-bc3f-dd32381bb44d\") " pod="openshift-must-gather-7khvt/must-gather-hbdnk"
Nov 26 11:58:37 crc kubenswrapper[4622]: I1126 11:58:37.127143 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4x42w\" (UniqueName: \"kubernetes.io/projected/5ca03bf9-3a97-4510-bc3f-dd32381bb44d-kube-api-access-4x42w\") pod \"must-gather-hbdnk\" (UID: \"5ca03bf9-3a97-4510-bc3f-dd32381bb44d\") " pod="openshift-must-gather-7khvt/must-gather-hbdnk"
Nov 26 11:58:37 crc kubenswrapper[4622]: I1126 11:58:37.127824 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5ca03bf9-3a97-4510-bc3f-dd32381bb44d-must-gather-output\") pod \"must-gather-hbdnk\" (UID: \"5ca03bf9-3a97-4510-bc3f-dd32381bb44d\") " pod="openshift-must-gather-7khvt/must-gather-hbdnk"
Nov 26 11:58:37 crc kubenswrapper[4622]: I1126 11:58:37.128232 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5ca03bf9-3a97-4510-bc3f-dd32381bb44d-must-gather-output\") pod \"must-gather-hbdnk\" (UID: \"5ca03bf9-3a97-4510-bc3f-dd32381bb44d\") " pod="openshift-must-gather-7khvt/must-gather-hbdnk"
Nov 26 11:58:37 crc kubenswrapper[4622]: I1126 11:58:37.152450 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4x42w\" (UniqueName: \"kubernetes.io/projected/5ca03bf9-3a97-4510-bc3f-dd32381bb44d-kube-api-access-4x42w\") pod \"must-gather-hbdnk\" (UID: \"5ca03bf9-3a97-4510-bc3f-dd32381bb44d\") " pod="openshift-must-gather-7khvt/must-gather-hbdnk"
Nov 26 11:58:37 crc kubenswrapper[4622]: I1126 11:58:37.246598 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-7khvt/must-gather-hbdnk"
Nov 26 11:58:37 crc kubenswrapper[4622]: I1126 11:58:37.748670 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-7khvt/must-gather-hbdnk"]
Nov 26 11:58:37 crc kubenswrapper[4622]: I1126 11:58:37.800043 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7khvt/must-gather-hbdnk" event={"ID":"5ca03bf9-3a97-4510-bc3f-dd32381bb44d","Type":"ContainerStarted","Data":"8e9807ac08fd198ab5a38f4c87b750b683cb4e66968e437707a0a3837eb0dd6e"}
Nov 26 11:58:42 crc kubenswrapper[4622]: I1126 11:58:42.868530 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7khvt/must-gather-hbdnk" event={"ID":"5ca03bf9-3a97-4510-bc3f-dd32381bb44d","Type":"ContainerStarted","Data":"604db561c1eea3b32146bc500946cbbfcecaad07619673e0c002a388661c12d4"}
Nov 26 11:58:42 crc kubenswrapper[4622]: I1126 11:58:42.869241 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7khvt/must-gather-hbdnk" event={"ID":"5ca03bf9-3a97-4510-bc3f-dd32381bb44d","Type":"ContainerStarted","Data":"6a8498608393b55e70e623680800716568c630c8be61ba58c60469bfa052c18e"}
Nov 26 11:58:42 crc kubenswrapper[4622]: I1126 11:58:42.903077 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-7khvt/must-gather-hbdnk" podStartSLOduration=2.577999653 podStartE2EDuration="6.903052303s" podCreationTimestamp="2025-11-26 11:58:36 +0000 UTC" firstStartedPulling="2025-11-26 11:58:37.766822842 +0000 UTC m=+2877.358034365" lastFinishedPulling="2025-11-26 11:58:42.091875493 +0000 UTC m=+2881.683087015" observedRunningTime="2025-11-26 11:58:42.892208915 +0000 UTC m=+2882.483420437" watchObservedRunningTime="2025-11-26 11:58:42.903052303 +0000 UTC m=+2882.494263825"
Nov 26 11:58:43 crc kubenswrapper[4622]: I1126 11:58:43.706007 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515"
Nov 26 11:58:43 crc kubenswrapper[4622]: E1126 11:58:43.706680 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b"
Nov 26 11:58:45 crc kubenswrapper[4622]: I1126 11:58:45.933460 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-7khvt/crc-debug-7bj4c"]
Nov 26 11:58:45 crc kubenswrapper[4622]: I1126 11:58:45.935511 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-7khvt/crc-debug-7bj4c" Nov 26 11:58:45 crc kubenswrapper[4622]: I1126 11:58:45.973688 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/90913128-a4bc-4c7d-9307-6355d5f1ee43-host\") pod \"crc-debug-7bj4c\" (UID: \"90913128-a4bc-4c7d-9307-6355d5f1ee43\") " pod="openshift-must-gather-7khvt/crc-debug-7bj4c" Nov 26 11:58:45 crc kubenswrapper[4622]: I1126 11:58:45.973973 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zgnl\" (UniqueName: \"kubernetes.io/projected/90913128-a4bc-4c7d-9307-6355d5f1ee43-kube-api-access-4zgnl\") pod \"crc-debug-7bj4c\" (UID: \"90913128-a4bc-4c7d-9307-6355d5f1ee43\") " pod="openshift-must-gather-7khvt/crc-debug-7bj4c" Nov 26 11:58:46 crc kubenswrapper[4622]: I1126 11:58:46.075856 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zgnl\" (UniqueName: \"kubernetes.io/projected/90913128-a4bc-4c7d-9307-6355d5f1ee43-kube-api-access-4zgnl\") pod \"crc-debug-7bj4c\" (UID: \"90913128-a4bc-4c7d-9307-6355d5f1ee43\") " pod="openshift-must-gather-7khvt/crc-debug-7bj4c" Nov 26 11:58:46 crc kubenswrapper[4622]: I1126 11:58:46.076032 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/90913128-a4bc-4c7d-9307-6355d5f1ee43-host\") pod \"crc-debug-7bj4c\" (UID: \"90913128-a4bc-4c7d-9307-6355d5f1ee43\") " pod="openshift-must-gather-7khvt/crc-debug-7bj4c" Nov 26 11:58:46 crc kubenswrapper[4622]: I1126 11:58:46.076222 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/90913128-a4bc-4c7d-9307-6355d5f1ee43-host\") pod \"crc-debug-7bj4c\" (UID: \"90913128-a4bc-4c7d-9307-6355d5f1ee43\") " pod="openshift-must-gather-7khvt/crc-debug-7bj4c" Nov 26 11:58:46 crc kubenswrapper[4622]: I1126 11:58:46.106398 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zgnl\" (UniqueName: \"kubernetes.io/projected/90913128-a4bc-4c7d-9307-6355d5f1ee43-kube-api-access-4zgnl\") pod \"crc-debug-7bj4c\" (UID: \"90913128-a4bc-4c7d-9307-6355d5f1ee43\") " pod="openshift-must-gather-7khvt/crc-debug-7bj4c" Nov 26 11:58:46 crc kubenswrapper[4622]: I1126 11:58:46.252345 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-7khvt/crc-debug-7bj4c" Nov 26 11:58:46 crc kubenswrapper[4622]: W1126 11:58:46.278660 4622 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90913128_a4bc_4c7d_9307_6355d5f1ee43.slice/crio-e8cb65b57a44552222e963590e8b775e8ca457f9fd07a8328d6555ba675feaa2 WatchSource:0}: Error finding container e8cb65b57a44552222e963590e8b775e8ca457f9fd07a8328d6555ba675feaa2: Status 404 returned error can't find the container with id e8cb65b57a44552222e963590e8b775e8ca457f9fd07a8328d6555ba675feaa2 Nov 26 11:58:46 crc kubenswrapper[4622]: I1126 11:58:46.901999 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7khvt/crc-debug-7bj4c" event={"ID":"90913128-a4bc-4c7d-9307-6355d5f1ee43","Type":"ContainerStarted","Data":"e8cb65b57a44552222e963590e8b775e8ca457f9fd07a8328d6555ba675feaa2"} Nov 26 11:58:57 crc kubenswrapper[4622]: I1126 11:58:57.001456 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7khvt/crc-debug-7bj4c" event={"ID":"90913128-a4bc-4c7d-9307-6355d5f1ee43","Type":"ContainerStarted","Data":"618fb05c3697dbd648c3836c3ff2c7206192723e76396506f36c4374543464c5"} Nov 26 11:58:57 crc kubenswrapper[4622]: I1126 11:58:57.018881 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-7khvt/crc-debug-7bj4c" podStartSLOduration=2.007045125 podStartE2EDuration="12.018856828s" podCreationTimestamp="2025-11-26 11:58:45 +0000 UTC" firstStartedPulling="2025-11-26 11:58:46.281054139 +0000 UTC m=+2885.872265661" lastFinishedPulling="2025-11-26 11:58:56.292865842 +0000 UTC m=+2895.884077364" observedRunningTime="2025-11-26 11:58:57.015908641 +0000 UTC m=+2896.607120163" watchObservedRunningTime="2025-11-26 11:58:57.018856828 +0000 UTC m=+2896.610068349" Nov 26 11:58:57 crc kubenswrapper[4622]: I1126 11:58:57.706016 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 11:58:57 crc kubenswrapper[4622]: E1126 11:58:57.706556 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:59:08 crc kubenswrapper[4622]: I1126 11:59:08.706670 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 11:59:08 crc kubenswrapper[4622]: E1126 11:59:08.707645 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:59:16 crc kubenswrapper[4622]: I1126 11:59:16.158075 4622 generic.go:334] "Generic (PLEG): container finished" podID="90913128-a4bc-4c7d-9307-6355d5f1ee43" containerID="618fb05c3697dbd648c3836c3ff2c7206192723e76396506f36c4374543464c5" exitCode=0 Nov 26 11:59:16 crc kubenswrapper[4622]: I1126 11:59:16.158154 
4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7khvt/crc-debug-7bj4c" event={"ID":"90913128-a4bc-4c7d-9307-6355d5f1ee43","Type":"ContainerDied","Data":"618fb05c3697dbd648c3836c3ff2c7206192723e76396506f36c4374543464c5"} Nov 26 11:59:17 crc kubenswrapper[4622]: I1126 11:59:17.267020 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-7khvt/crc-debug-7bj4c" Nov 26 11:59:17 crc kubenswrapper[4622]: I1126 11:59:17.348770 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-7khvt/crc-debug-7bj4c"] Nov 26 11:59:17 crc kubenswrapper[4622]: I1126 11:59:17.358947 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-7khvt/crc-debug-7bj4c"] Nov 26 11:59:17 crc kubenswrapper[4622]: I1126 11:59:17.419571 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zgnl\" (UniqueName: \"kubernetes.io/projected/90913128-a4bc-4c7d-9307-6355d5f1ee43-kube-api-access-4zgnl\") pod \"90913128-a4bc-4c7d-9307-6355d5f1ee43\" (UID: \"90913128-a4bc-4c7d-9307-6355d5f1ee43\") " Nov 26 11:59:17 crc kubenswrapper[4622]: I1126 11:59:17.419720 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/90913128-a4bc-4c7d-9307-6355d5f1ee43-host\") pod \"90913128-a4bc-4c7d-9307-6355d5f1ee43\" (UID: \"90913128-a4bc-4c7d-9307-6355d5f1ee43\") " Nov 26 11:59:17 crc kubenswrapper[4622]: I1126 11:59:17.420647 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/90913128-a4bc-4c7d-9307-6355d5f1ee43-host" (OuterVolumeSpecName: "host") pod "90913128-a4bc-4c7d-9307-6355d5f1ee43" (UID: "90913128-a4bc-4c7d-9307-6355d5f1ee43"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:59:17 crc kubenswrapper[4622]: I1126 11:59:17.426532 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90913128-a4bc-4c7d-9307-6355d5f1ee43-kube-api-access-4zgnl" (OuterVolumeSpecName: "kube-api-access-4zgnl") pod "90913128-a4bc-4c7d-9307-6355d5f1ee43" (UID: "90913128-a4bc-4c7d-9307-6355d5f1ee43"). InnerVolumeSpecName "kube-api-access-4zgnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:59:17 crc kubenswrapper[4622]: I1126 11:59:17.523323 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zgnl\" (UniqueName: \"kubernetes.io/projected/90913128-a4bc-4c7d-9307-6355d5f1ee43-kube-api-access-4zgnl\") on node \"crc\" DevicePath \"\"" Nov 26 11:59:17 crc kubenswrapper[4622]: I1126 11:59:17.523357 4622 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/90913128-a4bc-4c7d-9307-6355d5f1ee43-host\") on node \"crc\" DevicePath \"\"" Nov 26 11:59:18 crc kubenswrapper[4622]: I1126 11:59:18.181585 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8cb65b57a44552222e963590e8b775e8ca457f9fd07a8328d6555ba675feaa2" Nov 26 11:59:18 crc kubenswrapper[4622]: I1126 11:59:18.181654 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-7khvt/crc-debug-7bj4c" Nov 26 11:59:18 crc kubenswrapper[4622]: I1126 11:59:18.582230 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-7khvt/crc-debug-n49j4"] Nov 26 11:59:18 crc kubenswrapper[4622]: E1126 11:59:18.583301 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90913128-a4bc-4c7d-9307-6355d5f1ee43" containerName="container-00" Nov 26 11:59:18 crc kubenswrapper[4622]: I1126 11:59:18.583326 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="90913128-a4bc-4c7d-9307-6355d5f1ee43" containerName="container-00" Nov 26 11:59:18 crc kubenswrapper[4622]: I1126 11:59:18.583634 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="90913128-a4bc-4c7d-9307-6355d5f1ee43" containerName="container-00" Nov 26 11:59:18 crc kubenswrapper[4622]: I1126 11:59:18.584542 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-7khvt/crc-debug-n49j4" Nov 26 11:59:18 crc kubenswrapper[4622]: I1126 11:59:18.714675 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90913128-a4bc-4c7d-9307-6355d5f1ee43" path="/var/lib/kubelet/pods/90913128-a4bc-4c7d-9307-6355d5f1ee43/volumes" Nov 26 11:59:18 crc kubenswrapper[4622]: I1126 11:59:18.754686 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e-host\") pod \"crc-debug-n49j4\" (UID: \"cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e\") " pod="openshift-must-gather-7khvt/crc-debug-n49j4" Nov 26 11:59:18 crc kubenswrapper[4622]: I1126 11:59:18.754823 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbvrg\" (UniqueName: \"kubernetes.io/projected/cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e-kube-api-access-fbvrg\") pod \"crc-debug-n49j4\" (UID: \"cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e\") " pod="openshift-must-gather-7khvt/crc-debug-n49j4" Nov 26 11:59:18 crc kubenswrapper[4622]: I1126 11:59:18.856902 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbvrg\" (UniqueName: \"kubernetes.io/projected/cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e-kube-api-access-fbvrg\") pod \"crc-debug-n49j4\" (UID: \"cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e\") " pod="openshift-must-gather-7khvt/crc-debug-n49j4" Nov 26 11:59:18 crc kubenswrapper[4622]: I1126 11:59:18.857054 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e-host\") pod \"crc-debug-n49j4\" (UID: \"cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e\") " pod="openshift-must-gather-7khvt/crc-debug-n49j4" Nov 26 11:59:18 crc kubenswrapper[4622]: I1126 11:59:18.857293 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e-host\") pod \"crc-debug-n49j4\" (UID: \"cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e\") " pod="openshift-must-gather-7khvt/crc-debug-n49j4" Nov 26 11:59:18 crc kubenswrapper[4622]: I1126 11:59:18.881422 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbvrg\" (UniqueName: \"kubernetes.io/projected/cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e-kube-api-access-fbvrg\") pod \"crc-debug-n49j4\" (UID: \"cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e\") " 
pod="openshift-must-gather-7khvt/crc-debug-n49j4" Nov 26 11:59:18 crc kubenswrapper[4622]: I1126 11:59:18.903121 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-7khvt/crc-debug-n49j4" Nov 26 11:59:19 crc kubenswrapper[4622]: I1126 11:59:19.193431 4622 generic.go:334] "Generic (PLEG): container finished" podID="cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e" containerID="be7bda3d10ecb359553514ddc794793878654990ad8f655505644ec8b3ecf92e" exitCode=1 Nov 26 11:59:19 crc kubenswrapper[4622]: I1126 11:59:19.193534 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7khvt/crc-debug-n49j4" event={"ID":"cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e","Type":"ContainerDied","Data":"be7bda3d10ecb359553514ddc794793878654990ad8f655505644ec8b3ecf92e"} Nov 26 11:59:19 crc kubenswrapper[4622]: I1126 11:59:19.193847 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7khvt/crc-debug-n49j4" event={"ID":"cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e","Type":"ContainerStarted","Data":"ccdef9ed9af53c0105fa098434e1a48e230ef1907eb1ccd84777eb985f087fcf"} Nov 26 11:59:19 crc kubenswrapper[4622]: I1126 11:59:19.227218 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-7khvt/crc-debug-n49j4"] Nov 26 11:59:19 crc kubenswrapper[4622]: I1126 11:59:19.237405 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-7khvt/crc-debug-n49j4"] Nov 26 11:59:20 crc kubenswrapper[4622]: I1126 11:59:20.309762 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-7khvt/crc-debug-n49j4" Nov 26 11:59:20 crc kubenswrapper[4622]: I1126 11:59:20.388875 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbvrg\" (UniqueName: \"kubernetes.io/projected/cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e-kube-api-access-fbvrg\") pod \"cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e\" (UID: \"cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e\") " Nov 26 11:59:20 crc kubenswrapper[4622]: I1126 11:59:20.389214 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e-host\") pod \"cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e\" (UID: \"cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e\") " Nov 26 11:59:20 crc kubenswrapper[4622]: I1126 11:59:20.390299 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e-host" (OuterVolumeSpecName: "host") pod "cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e" (UID: "cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 26 11:59:20 crc kubenswrapper[4622]: I1126 11:59:20.400741 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e-kube-api-access-fbvrg" (OuterVolumeSpecName: "kube-api-access-fbvrg") pod "cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e" (UID: "cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e"). InnerVolumeSpecName "kube-api-access-fbvrg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 11:59:20 crc kubenswrapper[4622]: I1126 11:59:20.493190 4622 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e-host\") on node \"crc\" DevicePath \"\"" Nov 26 11:59:20 crc kubenswrapper[4622]: I1126 11:59:20.493234 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbvrg\" (UniqueName: \"kubernetes.io/projected/cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e-kube-api-access-fbvrg\") on node \"crc\" DevicePath \"\"" Nov 26 11:59:20 crc kubenswrapper[4622]: I1126 11:59:20.725032 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e" path="/var/lib/kubelet/pods/cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e/volumes" Nov 26 11:59:21 crc kubenswrapper[4622]: I1126 11:59:21.216311 4622 scope.go:117] "RemoveContainer" containerID="be7bda3d10ecb359553514ddc794793878654990ad8f655505644ec8b3ecf92e" Nov 26 11:59:21 crc kubenswrapper[4622]: I1126 11:59:21.216362 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-7khvt/crc-debug-n49j4" Nov 26 11:59:21 crc kubenswrapper[4622]: I1126 11:59:21.706611 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 11:59:21 crc kubenswrapper[4622]: E1126 11:59:21.706933 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:59:28 crc kubenswrapper[4622]: E1126 11:59:28.713546 4622 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \"http://38.102.83.113:5001/v2/\": dial tcp 38.102.83.113:5001: i/o timeout" image="38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1" Nov 26 11:59:28 crc kubenswrapper[4622]: E1126 11:59:28.714098 4622 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \"http://38.102.83.113:5001/v2/\": dial tcp 38.102.83.113:5001: i/o timeout" image="38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1" Nov 26 11:59:28 crc kubenswrapper[4622]: E1126 11:59:28.714239 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p446s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-76b59d6455-m8xbs_openstack-operators(3b8a350c-99de-4c01-ae25-cf10b5247f9a): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \"http://38.102.83.113:5001/v2/\": dial tcp 38.102.83.113:5001: i/o timeout" logger="UnhandledError" Nov 26 11:59:28 crc kubenswrapper[4622]: E1126 11:59:28.715345 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \\\"http://38.102.83.113:5001/v2/\\\": dial tcp 38.102.83.113:5001: i/o timeout\"" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" podUID="3b8a350c-99de-4c01-ae25-cf10b5247f9a" Nov 26 11:59:32 crc kubenswrapper[4622]: I1126 11:59:32.706420 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 11:59:32 crc kubenswrapper[4622]: E1126 11:59:32.707541 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:59:39 crc kubenswrapper[4622]: E1126 11:59:39.709752 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1\\\"\"" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" podUID="3b8a350c-99de-4c01-ae25-cf10b5247f9a" Nov 26 11:59:44 crc kubenswrapper[4622]: I1126 11:59:44.708466 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 11:59:44 crc kubenswrapper[4622]: E1126 11:59:44.709589 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 11:59:59 crc kubenswrapper[4622]: I1126 11:59:59.706450 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 11:59:59 crc kubenswrapper[4622]: E1126 11:59:59.707560 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.164857 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf"] Nov 26 12:00:00 crc kubenswrapper[4622]: E1126 12:00:00.165401 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e" containerName="container-00" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.165420 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e" containerName="container-00" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.165652 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfdd2a01-11ec-4fe8-8d6a-b2ffc666c02e" containerName="container-00" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.166456 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.172972 4622 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.173597 4622 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.179243 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf"] Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.218856 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/accfe111-409a-479c-9ffd-7ed26521032f-secret-volume\") pod \"collect-profiles-29402640-8v4bf\" (UID: \"accfe111-409a-479c-9ffd-7ed26521032f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.218922 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/accfe111-409a-479c-9ffd-7ed26521032f-config-volume\") pod \"collect-profiles-29402640-8v4bf\" (UID: \"accfe111-409a-479c-9ffd-7ed26521032f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.219035 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhskh\" (UniqueName: \"kubernetes.io/projected/accfe111-409a-479c-9ffd-7ed26521032f-kube-api-access-qhskh\") pod \"collect-profiles-29402640-8v4bf\" (UID: \"accfe111-409a-479c-9ffd-7ed26521032f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.321439 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhskh\" (UniqueName: \"kubernetes.io/projected/accfe111-409a-479c-9ffd-7ed26521032f-kube-api-access-qhskh\") pod \"collect-profiles-29402640-8v4bf\" (UID: \"accfe111-409a-479c-9ffd-7ed26521032f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.321605 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/accfe111-409a-479c-9ffd-7ed26521032f-secret-volume\") pod \"collect-profiles-29402640-8v4bf\" (UID: \"accfe111-409a-479c-9ffd-7ed26521032f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.321741 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/accfe111-409a-479c-9ffd-7ed26521032f-config-volume\") pod \"collect-profiles-29402640-8v4bf\" (UID: \"accfe111-409a-479c-9ffd-7ed26521032f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.322730 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/accfe111-409a-479c-9ffd-7ed26521032f-config-volume\") pod 
\"collect-profiles-29402640-8v4bf\" (UID: \"accfe111-409a-479c-9ffd-7ed26521032f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.329379 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/accfe111-409a-479c-9ffd-7ed26521032f-secret-volume\") pod \"collect-profiles-29402640-8v4bf\" (UID: \"accfe111-409a-479c-9ffd-7ed26521032f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.338735 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhskh\" (UniqueName: \"kubernetes.io/projected/accfe111-409a-479c-9ffd-7ed26521032f-kube-api-access-qhskh\") pod \"collect-profiles-29402640-8v4bf\" (UID: \"accfe111-409a-479c-9ffd-7ed26521032f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.501094 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" Nov 26 12:00:00 crc kubenswrapper[4622]: I1126 12:00:00.927232 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf"] Nov 26 12:00:01 crc kubenswrapper[4622]: I1126 12:00:01.598394 4622 generic.go:334] "Generic (PLEG): container finished" podID="accfe111-409a-479c-9ffd-7ed26521032f" containerID="49d3bc83e18581dbbb77e465a23ba83e42be07788b9c9168255f1eed46150336" exitCode=0 Nov 26 12:00:01 crc kubenswrapper[4622]: I1126 12:00:01.598521 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" event={"ID":"accfe111-409a-479c-9ffd-7ed26521032f","Type":"ContainerDied","Data":"49d3bc83e18581dbbb77e465a23ba83e42be07788b9c9168255f1eed46150336"} Nov 26 12:00:01 crc kubenswrapper[4622]: I1126 12:00:01.598702 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" event={"ID":"accfe111-409a-479c-9ffd-7ed26521032f","Type":"ContainerStarted","Data":"a8cfbd5ced5c367b48507cd086ece5ea495faee267740e69037c3166d6335c38"} Nov 26 12:00:02 crc kubenswrapper[4622]: I1126 12:00:02.906294 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" Nov 26 12:00:02 crc kubenswrapper[4622]: I1126 12:00:02.996736 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-cc95b6f5d-n86gp_b07c19ec-e29f-4d61-b665-c5d66e37d79f/barbican-api/0.log" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.053335 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-cc95b6f5d-n86gp_b07c19ec-e29f-4d61-b665-c5d66e37d79f/barbican-api-log/0.log" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.081654 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhskh\" (UniqueName: \"kubernetes.io/projected/accfe111-409a-479c-9ffd-7ed26521032f-kube-api-access-qhskh\") pod \"accfe111-409a-479c-9ffd-7ed26521032f\" (UID: \"accfe111-409a-479c-9ffd-7ed26521032f\") " Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.082071 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/accfe111-409a-479c-9ffd-7ed26521032f-secret-volume\") pod \"accfe111-409a-479c-9ffd-7ed26521032f\" (UID: \"accfe111-409a-479c-9ffd-7ed26521032f\") " Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.082099 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/accfe111-409a-479c-9ffd-7ed26521032f-config-volume\") pod \"accfe111-409a-479c-9ffd-7ed26521032f\" (UID: \"accfe111-409a-479c-9ffd-7ed26521032f\") " Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.082702 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/accfe111-409a-479c-9ffd-7ed26521032f-config-volume" (OuterVolumeSpecName: "config-volume") pod "accfe111-409a-479c-9ffd-7ed26521032f" (UID: "accfe111-409a-479c-9ffd-7ed26521032f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.087005 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/accfe111-409a-479c-9ffd-7ed26521032f-kube-api-access-qhskh" (OuterVolumeSpecName: "kube-api-access-qhskh") pod "accfe111-409a-479c-9ffd-7ed26521032f" (UID: "accfe111-409a-479c-9ffd-7ed26521032f"). InnerVolumeSpecName "kube-api-access-qhskh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.087492 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/accfe111-409a-479c-9ffd-7ed26521032f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "accfe111-409a-479c-9ffd-7ed26521032f" (UID: "accfe111-409a-479c-9ffd-7ed26521032f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.158055 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7445bd57d8-4bsbs_e0174809-96a2-416e-82b9-9519d71a81d2/barbican-keystone-listener/0.log" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.185495 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhskh\" (UniqueName: \"kubernetes.io/projected/accfe111-409a-479c-9ffd-7ed26521032f-kube-api-access-qhskh\") on node \"crc\" DevicePath \"\"" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.185612 4622 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/accfe111-409a-479c-9ffd-7ed26521032f-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.185678 4622 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/accfe111-409a-479c-9ffd-7ed26521032f-config-volume\") on node \"crc\" DevicePath \"\"" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.222643 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7445bd57d8-4bsbs_e0174809-96a2-416e-82b9-9519d71a81d2/barbican-keystone-listener-log/0.log" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.279420 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-755d984c7c-rw8ld_8523271e-6368-4a2a-b81e-fd513c441cb6/barbican-worker/0.log" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.366486 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-755d984c7c-rw8ld_8523271e-6368-4a2a-b81e-fd513c441cb6/barbican-worker-log/0.log" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.484191 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-757ff_f8ee3dad-287f-4ba8-9b48-0eb36bdabafd/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.540106 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_dbf9ca7b-5cb5-4f47-9876-d18e42cade09/ceilometer-central-agent/0.log" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.616938 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_dbf9ca7b-5cb5-4f47-9876-d18e42cade09/ceilometer-notification-agent/0.log" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.619131 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" event={"ID":"accfe111-409a-479c-9ffd-7ed26521032f","Type":"ContainerDied","Data":"a8cfbd5ced5c367b48507cd086ece5ea495faee267740e69037c3166d6335c38"} Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.619247 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8cfbd5ced5c367b48507cd086ece5ea495faee267740e69037c3166d6335c38" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.619202 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29402640-8v4bf" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.667533 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_dbf9ca7b-5cb5-4f47-9876-d18e42cade09/proxy-httpd/0.log" Nov 26 12:00:03 crc kubenswrapper[4622]: I1126 12:00:03.718900 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_dbf9ca7b-5cb5-4f47-9876-d18e42cade09/sg-core/0.log" Nov 26 12:00:04 crc kubenswrapper[4622]: I1126 12:00:04.021650 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7"] Nov 26 12:00:04 crc kubenswrapper[4622]: I1126 12:00:04.045814 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29402595-48vr7"] Nov 26 12:00:04 crc kubenswrapper[4622]: I1126 12:00:04.096833 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-edpm-deployment-openstack-edpm-ipam-pmjs4_c780381a-c3ad-4538-a33f-b1d7611667ef/ceph-client-edpm-deployment-openstack-edpm-ipam/0.log" Nov 26 12:00:04 crc kubenswrapper[4622]: I1126 12:00:04.097751 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-6dbts_5864c7d5-7acc-4a81-8fea-4bdf3bc09e5c/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log" Nov 26 12:00:04 crc kubenswrapper[4622]: I1126 12:00:04.257042 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_31b46baf-53d8-4837-9834-4e465b305b6d/cinder-api/0.log" Nov 26 12:00:04 crc kubenswrapper[4622]: I1126 12:00:04.273127 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_31b46baf-53d8-4837-9834-4e465b305b6d/cinder-api-log/0.log" Nov 26 12:00:04 crc kubenswrapper[4622]: I1126 12:00:04.451043 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_950a31f4-607a-4c61-aa2c-606959f96b3b/probe/0.log" Nov 26 12:00:04 crc kubenswrapper[4622]: I1126 12:00:04.484670 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_950a31f4-607a-4c61-aa2c-606959f96b3b/cinder-backup/0.log" Nov 26 12:00:04 crc kubenswrapper[4622]: I1126 12:00:04.520267 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a603a685-c7c9-4ce2-b5dc-d198f02049ed/cinder-scheduler/0.log" Nov 26 12:00:04 crc kubenswrapper[4622]: I1126 12:00:04.639917 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a603a685-c7c9-4ce2-b5dc-d198f02049ed/probe/0.log" Nov 26 12:00:04 crc kubenswrapper[4622]: I1126 12:00:04.715474 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd944da3-1a0a-4c06-b3fe-9fcc8187fb01" path="/var/lib/kubelet/pods/dd944da3-1a0a-4c06-b3fe-9fcc8187fb01/volumes" Nov 26 12:00:04 crc kubenswrapper[4622]: I1126 12:00:04.716634 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_9f0ed356-b135-473d-ad4c-8e21c287af13/probe/0.log" Nov 26 12:00:04 crc kubenswrapper[4622]: I1126 12:00:04.770373 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_9f0ed356-b135-473d-ad4c-8e21c287af13/cinder-volume/0.log" Nov 26 12:00:04 crc kubenswrapper[4622]: I1126 12:00:04.856694 4622 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-9mcpv_ddb85683-5e25-4c50-8df2-7f7fee07588d/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Nov 26 12:00:04 crc kubenswrapper[4622]: I1126 12:00:04.927413 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-m99bd_0f029b05-5927-4ba7-a37d-b2b44c00071d/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Nov 26 12:00:05 crc kubenswrapper[4622]: I1126 12:00:05.079359 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6885d49d55-25kwv_7144d84c-d70a-4566-bb04-1b0cb8e058fb/init/0.log" Nov 26 12:00:05 crc kubenswrapper[4622]: I1126 12:00:05.272765 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6885d49d55-25kwv_7144d84c-d70a-4566-bb04-1b0cb8e058fb/dnsmasq-dns/0.log" Nov 26 12:00:05 crc kubenswrapper[4622]: I1126 12:00:05.282276 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_5cad4cf3-7cf8-43eb-b48d-954bb0d60d03/glance-httpd/0.log" Nov 26 12:00:05 crc kubenswrapper[4622]: I1126 12:00:05.304305 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6885d49d55-25kwv_7144d84c-d70a-4566-bb04-1b0cb8e058fb/init/0.log" Nov 26 12:00:05 crc kubenswrapper[4622]: I1126 12:00:05.473471 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_e6cb4914-4902-4e5a-8c29-34f4136736f0/glance-log/0.log" Nov 26 12:00:05 crc kubenswrapper[4622]: I1126 12:00:05.476020 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_5cad4cf3-7cf8-43eb-b48d-954bb0d60d03/glance-log/0.log" Nov 26 12:00:05 crc kubenswrapper[4622]: I1126 12:00:05.491899 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_e6cb4914-4902-4e5a-8c29-34f4136736f0/glance-httpd/0.log" Nov 26 12:00:05 crc kubenswrapper[4622]: I1126 12:00:05.775171 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-bc6fbfc58-jv7cz_2285a60b-0202-4abd-91de-7241e109804f/horizon/0.log" Nov 26 12:00:05 crc kubenswrapper[4622]: I1126 12:00:05.886421 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-ss25k_f5e33fbe-58e7-46b2-9202-b7c646631fda/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Nov 26 12:00:05 crc kubenswrapper[4622]: I1126 12:00:05.905106 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-bc6fbfc58-jv7cz_2285a60b-0202-4abd-91de-7241e109804f/horizon-log/0.log" Nov 26 12:00:06 crc kubenswrapper[4622]: I1126 12:00:06.033860 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-g4z46_e8fb2686-cd66-4e6e-ac7f-466cf5211f3e/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Nov 26 12:00:06 crc kubenswrapper[4622]: I1126 12:00:06.166124 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-5b79cbd499-vlhtt_eccaaa42-60dc-4995-b79e-c11979b75650/keystone-api/0.log" Nov 26 12:00:06 crc kubenswrapper[4622]: I1126 12:00:06.244363 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_70e0b4b2-a4e5-45fe-b654-a0aeed07e2f2/kube-state-metrics/0.log" Nov 26 12:00:06 crc kubenswrapper[4622]: I1126 12:00:06.338902 4622 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-fr9q5_61960c03-d6c3-417c-8445-a485f622d6b1/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Nov 26 12:00:06 crc kubenswrapper[4622]: I1126 12:00:06.453602 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-9ad5-account-create-update-sg5r5_2b956c55-fa7f-474e-a13a-a410f68ce795/mariadb-account-create-update/0.log" Nov 26 12:00:06 crc kubenswrapper[4622]: I1126 12:00:06.554888 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_be89b876-f22c-4a79-8dbd-d543a4165cf3/manila-api-log/0.log" Nov 26 12:00:06 crc kubenswrapper[4622]: I1126 12:00:06.589464 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_be89b876-f22c-4a79-8dbd-d543a4165cf3/manila-api/0.log" Nov 26 12:00:06 crc kubenswrapper[4622]: I1126 12:00:06.632765 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-db-create-7kxg5_a36f9d11-1586-4876-afeb-a197d5479f7c/mariadb-database-create/0.log" Nov 26 12:00:06 crc kubenswrapper[4622]: I1126 12:00:06.734893 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-db-sync-mq94p_ae6e0966-f7e8-4460-97e1-2c9effc3f080/manila-db-sync/0.log" Nov 26 12:00:06 crc kubenswrapper[4622]: I1126 12:00:06.872335 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_dba7b70b-838a-4828-a4ae-7464524b8217/probe/0.log" Nov 26 12:00:06 crc kubenswrapper[4622]: I1126 12:00:06.872531 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_dba7b70b-838a-4828-a4ae-7464524b8217/manila-scheduler/0.log" Nov 26 12:00:06 crc kubenswrapper[4622]: I1126 12:00:06.990254 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_31e89058-d523-4206-9017-9d8c1c299094/manila-share/0.log" Nov 26 12:00:07 crc kubenswrapper[4622]: I1126 12:00:07.059486 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_31e89058-d523-4206-9017-9d8c1c299094/probe/0.log" Nov 26 12:00:07 crc kubenswrapper[4622]: I1126 12:00:07.248227 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6b5456649c-2jjn8_86ef3a8d-79f3-49f4-8e47-cf74f7618f66/neutron-api/0.log" Nov 26 12:00:07 crc kubenswrapper[4622]: I1126 12:00:07.353910 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6b5456649c-2jjn8_86ef3a8d-79f3-49f4-8e47-cf74f7618f66/neutron-httpd/0.log" Nov 26 12:00:07 crc kubenswrapper[4622]: I1126 12:00:07.368755 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-k7jw8_2b278dc2-9da9-470a-93b5-4918011b54a0/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Nov 26 12:00:07 crc kubenswrapper[4622]: I1126 12:00:07.876148 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_ca80895b-9518-45be-8896-06591e9356b2/nova-api-log/0.log" Nov 26 12:00:08 crc kubenswrapper[4622]: I1126 12:00:08.042284 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_d950a2ce-da5a-486f-9459-0da7366810fe/nova-cell0-conductor-conductor/0.log" Nov 26 12:00:08 crc kubenswrapper[4622]: I1126 12:00:08.095082 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_ca80895b-9518-45be-8896-06591e9356b2/nova-api-api/0.log" Nov 26 12:00:08 crc kubenswrapper[4622]: 
I1126 12:00:08.172723 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_851f7097-5531-4bf6-9768-2c1cd70db989/nova-cell1-conductor-conductor/0.log" Nov 26 12:00:08 crc kubenswrapper[4622]: I1126 12:00:08.337267 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_6afcd63d-fe69-405b-bb79-4dfa143d4651/nova-cell1-novncproxy-novncproxy/0.log" Nov 26 12:00:08 crc kubenswrapper[4622]: I1126 12:00:08.504399 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6strg_a8397a46-af89-4e1f-90fe-34d0d980d7a6/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam/0.log" Nov 26 12:00:08 crc kubenswrapper[4622]: I1126 12:00:08.645473 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_f079e6de-650d-45de-9ee0-bf7cc94c67b3/nova-metadata-log/0.log" Nov 26 12:00:08 crc kubenswrapper[4622]: I1126 12:00:08.838393 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_e7fcc1bc-db6d-4da1-a8ee-cce4a759c8f7/nova-scheduler-scheduler/0.log" Nov 26 12:00:08 crc kubenswrapper[4622]: I1126 12:00:08.939713 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_395125e4-6b50-4032-811d-b474b647ed88/mysql-bootstrap/0.log" Nov 26 12:00:09 crc kubenswrapper[4622]: I1126 12:00:09.177220 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_395125e4-6b50-4032-811d-b474b647ed88/mysql-bootstrap/0.log" Nov 26 12:00:09 crc kubenswrapper[4622]: I1126 12:00:09.183564 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_395125e4-6b50-4032-811d-b474b647ed88/galera/0.log" Nov 26 12:00:09 crc kubenswrapper[4622]: I1126 12:00:09.307421 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_acbc91d6-2b39-4663-9501-ee36fda433ed/mysql-bootstrap/0.log" Nov 26 12:00:09 crc kubenswrapper[4622]: I1126 12:00:09.439320 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_f079e6de-650d-45de-9ee0-bf7cc94c67b3/nova-metadata-metadata/0.log" Nov 26 12:00:09 crc kubenswrapper[4622]: I1126 12:00:09.486810 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_acbc91d6-2b39-4663-9501-ee36fda433ed/mysql-bootstrap/0.log" Nov 26 12:00:09 crc kubenswrapper[4622]: I1126 12:00:09.544964 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_acbc91d6-2b39-4663-9501-ee36fda433ed/galera/0.log" Nov 26 12:00:09 crc kubenswrapper[4622]: I1126 12:00:09.646802 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_fdfff739-ae1f-43ce-ad8a-f3b6608b78af/openstackclient/0.log" Nov 26 12:00:09 crc kubenswrapper[4622]: I1126 12:00:09.746870 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-8n2wh_1fba939c-4058-428f-a359-ea4e031e9fb3/ovn-controller/0.log" Nov 26 12:00:09 crc kubenswrapper[4622]: I1126 12:00:09.882439 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ksljn_0a1057f8-4662-461b-93ba-84576271087b/openstack-network-exporter/0.log" Nov 26 12:00:09 crc kubenswrapper[4622]: I1126 12:00:09.974514 4622 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-ovs-mj2jt_90daf55b-343c-4b1f-990f-1ad602050cf9/ovsdb-server-init/0.log" Nov 26 12:00:10 crc kubenswrapper[4622]: I1126 12:00:10.139155 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mj2jt_90daf55b-343c-4b1f-990f-1ad602050cf9/ovs-vswitchd/0.log" Nov 26 12:00:10 crc kubenswrapper[4622]: I1126 12:00:10.148123 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mj2jt_90daf55b-343c-4b1f-990f-1ad602050cf9/ovsdb-server/0.log" Nov 26 12:00:10 crc kubenswrapper[4622]: I1126 12:00:10.186128 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mj2jt_90daf55b-343c-4b1f-990f-1ad602050cf9/ovsdb-server-init/0.log" Nov 26 12:00:10 crc kubenswrapper[4622]: I1126 12:00:10.358838 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-p8rkn_0ea4999e-068f-4516-b986-41a81f868f48/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Nov 26 12:00:10 crc kubenswrapper[4622]: I1126 12:00:10.364385 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_d6b4db61-f18b-40a7-a97d-17849df44c24/openstack-network-exporter/0.log" Nov 26 12:00:10 crc kubenswrapper[4622]: I1126 12:00:10.455693 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_d6b4db61-f18b-40a7-a97d-17849df44c24/ovn-northd/0.log" Nov 26 12:00:10 crc kubenswrapper[4622]: I1126 12:00:10.562872 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_677a4ff3-7482-47f3-8f6d-6d96617fc000/openstack-network-exporter/0.log" Nov 26 12:00:10 crc kubenswrapper[4622]: I1126 12:00:10.656579 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_677a4ff3-7482-47f3-8f6d-6d96617fc000/ovsdbserver-nb/0.log" Nov 26 12:00:10 crc kubenswrapper[4622]: I1126 12:00:10.778111 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_781b9fbd-6f1d-48f9-be0a-8c15276f21a8/ovsdbserver-sb/0.log" Nov 26 12:00:10 crc kubenswrapper[4622]: I1126 12:00:10.817712 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_781b9fbd-6f1d-48f9-be0a-8c15276f21a8/openstack-network-exporter/0.log" Nov 26 12:00:10 crc kubenswrapper[4622]: I1126 12:00:10.910419 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-bc8786d46-z584k_5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2/placement-api/0.log" Nov 26 12:00:11 crc kubenswrapper[4622]: I1126 12:00:11.001531 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-bc8786d46-z584k_5eaa296b-2eb5-4729-ad5d-1b3bc85e93a2/placement-log/0.log" Nov 26 12:00:11 crc kubenswrapper[4622]: I1126 12:00:11.269614 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_28c34d7a-7439-4282-9ca3-c6f19cc47dda/setup-container/0.log" Nov 26 12:00:11 crc kubenswrapper[4622]: I1126 12:00:11.413005 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_28c34d7a-7439-4282-9ca3-c6f19cc47dda/setup-container/0.log" Nov 26 12:00:11 crc kubenswrapper[4622]: I1126 12:00:11.479734 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_28c34d7a-7439-4282-9ca3-c6f19cc47dda/rabbitmq/0.log" Nov 26 12:00:11 crc kubenswrapper[4622]: I1126 12:00:11.481456 4622 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_rabbitmq-server-0_556a2699-b555-4ae3-8aa0-8c545af95e25/setup-container/0.log" Nov 26 12:00:11 crc kubenswrapper[4622]: I1126 12:00:11.672969 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_556a2699-b555-4ae3-8aa0-8c545af95e25/setup-container/0.log" Nov 26 12:00:11 crc kubenswrapper[4622]: I1126 12:00:11.770802 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-bfrdk_6cd87990-9f66-40c9-ac5c-68b8755c46cd/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Nov 26 12:00:11 crc kubenswrapper[4622]: I1126 12:00:11.807627 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_556a2699-b555-4ae3-8aa0-8c545af95e25/rabbitmq/0.log" Nov 26 12:00:11 crc kubenswrapper[4622]: I1126 12:00:11.970307 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-l9ffm_970a7d0b-90b1-4d00-9304-6ac2c06fb675/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Nov 26 12:00:12 crc kubenswrapper[4622]: I1126 12:00:12.077758 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-8f599_e1cc67a8-3b68-4f53-8e83-8986eaf29d05/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Nov 26 12:00:12 crc kubenswrapper[4622]: I1126 12:00:12.178677 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-8gvkn_b7189366-2284-4019-b4c4-ecc32857a6c6/ssh-known-hosts-edpm-deployment/0.log" Nov 26 12:00:12 crc kubenswrapper[4622]: I1126 12:00:12.276468 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-ggcbx_e0b0eff7-0c1f-4dff-a8b5-5f957caae0bb/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Nov 26 12:00:12 crc kubenswrapper[4622]: I1126 12:00:12.706228 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 12:00:12 crc kubenswrapper[4622]: E1126 12:00:12.706596 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 12:00:17 crc kubenswrapper[4622]: I1126 12:00:17.022269 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_30b6a0c4-d329-474a-a14e-8068b059d893/memcached/0.log" Nov 26 12:00:26 crc kubenswrapper[4622]: I1126 12:00:26.705922 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 12:00:26 crc kubenswrapper[4622]: E1126 12:00:26.706718 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 12:00:33 crc kubenswrapper[4622]: I1126 12:00:33.247969 4622 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack-operators_3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz_fb60b1af-671e-46ac-ad17-3903260919b9/util/0.log" Nov 26 12:00:33 crc kubenswrapper[4622]: I1126 12:00:33.387007 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz_fb60b1af-671e-46ac-ad17-3903260919b9/util/0.log" Nov 26 12:00:33 crc kubenswrapper[4622]: I1126 12:00:33.405115 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz_fb60b1af-671e-46ac-ad17-3903260919b9/pull/0.log" Nov 26 12:00:33 crc kubenswrapper[4622]: I1126 12:00:33.429559 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz_fb60b1af-671e-46ac-ad17-3903260919b9/pull/0.log" Nov 26 12:00:33 crc kubenswrapper[4622]: I1126 12:00:33.567180 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz_fb60b1af-671e-46ac-ad17-3903260919b9/extract/0.log" Nov 26 12:00:33 crc kubenswrapper[4622]: I1126 12:00:33.573764 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz_fb60b1af-671e-46ac-ad17-3903260919b9/pull/0.log" Nov 26 12:00:33 crc kubenswrapper[4622]: I1126 12:00:33.610619 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3711d63ee32771f989196ada809b479d54ff48bb9ee48b91ea976dcc5a7b8hz_fb60b1af-671e-46ac-ad17-3903260919b9/util/0.log" Nov 26 12:00:33 crc kubenswrapper[4622]: I1126 12:00:33.734349 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7b64f4fb85-k2ggr_e952c5f2-5b48-4303-a533-e838c7a72c24/kube-rbac-proxy/0.log" Nov 26 12:00:33 crc kubenswrapper[4622]: I1126 12:00:33.775250 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7b64f4fb85-k2ggr_e952c5f2-5b48-4303-a533-e838c7a72c24/manager/0.log" Nov 26 12:00:33 crc kubenswrapper[4622]: I1126 12:00:33.835419 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6b7f75547b-4244k_2d3369dc-6ada-4806-8bf7-088fe4c0b913/kube-rbac-proxy/0.log" Nov 26 12:00:33 crc kubenswrapper[4622]: I1126 12:00:33.939651 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6b7f75547b-4244k_2d3369dc-6ada-4806-8bf7-088fe4c0b913/manager/0.log" Nov 26 12:00:33 crc kubenswrapper[4622]: I1126 12:00:33.963237 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-955677c94-88ql9_202d189d-cd09-4574-bea4-ad76a0b82cc4/kube-rbac-proxy/0.log" Nov 26 12:00:34 crc kubenswrapper[4622]: I1126 12:00:34.031893 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-955677c94-88ql9_202d189d-cd09-4574-bea4-ad76a0b82cc4/manager/0.log" Nov 26 12:00:34 crc kubenswrapper[4622]: I1126 12:00:34.105346 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-589cbd6b5b-lrt59_b86c4e95-2314-4759-9b74-77c0c811fc82/kube-rbac-proxy/0.log" Nov 26 12:00:34 crc 
kubenswrapper[4622]: I1126 12:00:34.212924 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-589cbd6b5b-lrt59_b86c4e95-2314-4759-9b74-77c0c811fc82/manager/0.log" Nov 26 12:00:34 crc kubenswrapper[4622]: I1126 12:00:34.283850 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5b77f656f-b4qw6_43521a88-13da-419a-9af7-b13a9c7c11f8/kube-rbac-proxy/0.log" Nov 26 12:00:34 crc kubenswrapper[4622]: I1126 12:00:34.291712 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5b77f656f-b4qw6_43521a88-13da-419a-9af7-b13a9c7c11f8/manager/0.log" Nov 26 12:00:34 crc kubenswrapper[4622]: I1126 12:00:34.372015 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5d494799bf-8h8x8_5031223b-1c18-4996-91c6-9a8a0db7a2eb/kube-rbac-proxy/0.log" Nov 26 12:00:34 crc kubenswrapper[4622]: I1126 12:00:34.449962 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5d494799bf-8h8x8_5031223b-1c18-4996-91c6-9a8a0db7a2eb/manager/0.log" Nov 26 12:00:34 crc kubenswrapper[4622]: I1126 12:00:34.501752 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-gzvb9_af470876-f83d-453f-bc58-96c91cabc509/kube-rbac-proxy/0.log" Nov 26 12:00:34 crc kubenswrapper[4622]: I1126 12:00:34.650036 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-67cb4dc6d4-2xzvm_3ccaeddc-377f-4950-b942-9420c4bbeaa6/kube-rbac-proxy/0.log" Nov 26 12:00:34 crc kubenswrapper[4622]: I1126 12:00:34.680868 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-67cb4dc6d4-2xzvm_3ccaeddc-377f-4950-b942-9420c4bbeaa6/manager/0.log" Nov 26 12:00:34 crc kubenswrapper[4622]: I1126 12:00:34.691287 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-gzvb9_af470876-f83d-453f-bc58-96c91cabc509/manager/0.log" Nov 26 12:00:34 crc kubenswrapper[4622]: I1126 12:00:34.809129 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b4567c7cf-9slpq_e718bbdf-bc0b-44a2-8e0a-aa5fc7f9390a/kube-rbac-proxy/0.log" Nov 26 12:00:34 crc kubenswrapper[4622]: I1126 12:00:34.860021 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b4567c7cf-9slpq_e718bbdf-bc0b-44a2-8e0a-aa5fc7f9390a/manager/0.log" Nov 26 12:00:34 crc kubenswrapper[4622]: I1126 12:00:34.953461 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5d499bf58b-nrn55_ff9243ae-4f66-4399-9ab0-67d195b2319f/kube-rbac-proxy/0.log" Nov 26 12:00:35 crc kubenswrapper[4622]: I1126 12:00:35.015260 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5d499bf58b-nrn55_ff9243ae-4f66-4399-9ab0-67d195b2319f/manager/0.log" Nov 26 12:00:35 crc kubenswrapper[4622]: I1126 12:00:35.063342 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-66f4dd4bc7-mrcsw_103f81dd-36e7-412f-a756-663e7d366a3a/kube-rbac-proxy/0.log" Nov 
26 12:00:35 crc kubenswrapper[4622]: I1126 12:00:35.138235 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-66f4dd4bc7-mrcsw_103f81dd-36e7-412f-a756-663e7d366a3a/manager/0.log" Nov 26 12:00:35 crc kubenswrapper[4622]: I1126 12:00:35.251997 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6fdcddb789-8mgd6_9a113fe4-1281-4a2f-896c-bac8b89ce952/kube-rbac-proxy/0.log" Nov 26 12:00:35 crc kubenswrapper[4622]: I1126 12:00:35.253164 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6fdcddb789-8mgd6_9a113fe4-1281-4a2f-896c-bac8b89ce952/manager/0.log" Nov 26 12:00:35 crc kubenswrapper[4622]: I1126 12:00:35.367611 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79556f57fc-wdjgz_79a68295-8240-4081-8c98-f75e2a04160b/kube-rbac-proxy/0.log" Nov 26 12:00:35 crc kubenswrapper[4622]: I1126 12:00:35.446259 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79556f57fc-wdjgz_79a68295-8240-4081-8c98-f75e2a04160b/manager/0.log" Nov 26 12:00:35 crc kubenswrapper[4622]: I1126 12:00:35.495261 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-64cdc6ff96-zc4pp_2596bf62-d165-4a25-95b6-888c3e8f5caa/kube-rbac-proxy/0.log" Nov 26 12:00:35 crc kubenswrapper[4622]: I1126 12:00:35.537536 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-64cdc6ff96-zc4pp_2596bf62-d165-4a25-95b6-888c3e8f5caa/manager/0.log" Nov 26 12:00:35 crc kubenswrapper[4622]: I1126 12:00:35.597923 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-674cb676c8pzq88_421c5332-a5be-406b-a039-e44918747042/kube-rbac-proxy/0.log" Nov 26 12:00:35 crc kubenswrapper[4622]: I1126 12:00:35.656542 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-674cb676c8pzq88_421c5332-a5be-406b-a039-e44918747042/manager/0.log" Nov 26 12:00:35 crc kubenswrapper[4622]: I1126 12:00:35.835347 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7b8f89988-7flbr_93a6cdc7-be83-4eba-af13-f1bc89429adf/operator/0.log" Nov 26 12:00:36 crc kubenswrapper[4622]: I1126 12:00:36.055105 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-nctx2_9aa64d20-32c0-4ded-856e-a5f4b01cef1e/registry-server/0.log" Nov 26 12:00:36 crc kubenswrapper[4622]: I1126 12:00:36.152347 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-56897c768d-jvn88_56fff709-6338-4ce4-883c-699491e7d55e/kube-rbac-proxy/0.log" Nov 26 12:00:36 crc kubenswrapper[4622]: I1126 12:00:36.249199 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-57988cc5b5-mn2wz_0694f996-33c7-4190-b883-0e404ce778f7/kube-rbac-proxy/0.log" Nov 26 12:00:36 crc kubenswrapper[4622]: I1126 12:00:36.337401 4622 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-56897c768d-jvn88_56fff709-6338-4ce4-883c-699491e7d55e/manager/0.log" Nov 26 12:00:36 crc kubenswrapper[4622]: I1126 12:00:36.411718 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-57988cc5b5-mn2wz_0694f996-33c7-4190-b883-0e404ce778f7/manager/0.log" Nov 26 12:00:36 crc kubenswrapper[4622]: I1126 12:00:36.506120 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-xsfhw_d402353c-5e0f-4f3c-b974-c327ed33adb9/operator/0.log" Nov 26 12:00:36 crc kubenswrapper[4622]: I1126 12:00:36.592257 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-d77b94747-zhrsj_d30b60e7-2165-4660-a037-5a3c47f807e5/kube-rbac-proxy/0.log" Nov 26 12:00:36 crc kubenswrapper[4622]: I1126 12:00:36.673340 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-d77b94747-zhrsj_d30b60e7-2165-4660-a037-5a3c47f807e5/manager/0.log" Nov 26 12:00:36 crc kubenswrapper[4622]: I1126 12:00:36.770034 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-8rlt2_c048fbdd-e084-4e64-9d3b-c9d8083b9019/kube-rbac-proxy/0.log" Nov 26 12:00:36 crc kubenswrapper[4622]: I1126 12:00:36.921149 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-8rlt2_c048fbdd-e084-4e64-9d3b-c9d8083b9019/manager/0.log" Nov 26 12:00:36 crc kubenswrapper[4622]: I1126 12:00:36.924004 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-659d75f7c6-q9vn7_903a6422-4d68-4aab-96e8-25452ffab66d/manager/0.log" Nov 26 12:00:36 crc kubenswrapper[4622]: I1126 12:00:36.926411 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd6c7f4c8-9zzhk_8ef536a2-d861-4672-94e6-5058fc76eb85/kube-rbac-proxy/0.log" Nov 26 12:00:36 crc kubenswrapper[4622]: I1126 12:00:36.978131 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd6c7f4c8-9zzhk_8ef536a2-d861-4672-94e6-5058fc76eb85/manager/0.log" Nov 26 12:00:37 crc kubenswrapper[4622]: I1126 12:00:37.080318 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-76b59d6455-m8xbs_3b8a350c-99de-4c01-ae25-cf10b5247f9a/kube-rbac-proxy/0.log" Nov 26 12:00:37 crc kubenswrapper[4622]: I1126 12:00:37.176024 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-656dcb59d4-mrntw_671c4f3d-053f-4261-bac4-537e9f4c88fa/kube-rbac-proxy/0.log" Nov 26 12:00:37 crc kubenswrapper[4622]: I1126 12:00:37.233654 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-656dcb59d4-mrntw_671c4f3d-053f-4261-bac4-537e9f4c88fa/manager/0.log" Nov 26 12:00:38 crc kubenswrapper[4622]: I1126 12:00:38.706921 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 12:00:38 crc kubenswrapper[4622]: E1126 12:00:38.707424 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 12:00:45 crc kubenswrapper[4622]: I1126 12:00:45.680004 4622 scope.go:117] "RemoveContainer" containerID="be58c9e1fa65f20292e110b611da1a3c9bf8d509cddde7b946b2e9f43d682ef5" Nov 26 12:00:52 crc kubenswrapper[4622]: I1126 12:00:52.146670 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-7qcfs_4c8189e8-b292-48af-8a28-23021c696ba5/control-plane-machine-set-operator/0.log" Nov 26 12:00:52 crc kubenswrapper[4622]: I1126 12:00:52.322185 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vprnt_ac45a607-6d21-475e-8f82-ed9bbcbb1f65/machine-api-operator/0.log" Nov 26 12:00:52 crc kubenswrapper[4622]: I1126 12:00:52.349459 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vprnt_ac45a607-6d21-475e-8f82-ed9bbcbb1f65/kube-rbac-proxy/0.log" Nov 26 12:00:52 crc kubenswrapper[4622]: I1126 12:00:52.707420 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 12:00:52 crc kubenswrapper[4622]: E1126 12:00:52.707638 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.151167 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29402641-zj9mf"] Nov 26 12:01:00 crc kubenswrapper[4622]: E1126 12:01:00.152039 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="accfe111-409a-479c-9ffd-7ed26521032f" containerName="collect-profiles" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.152054 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="accfe111-409a-479c-9ffd-7ed26521032f" containerName="collect-profiles" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.152263 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="accfe111-409a-479c-9ffd-7ed26521032f" containerName="collect-profiles" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.152901 4622 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.164408 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29402641-zj9mf"] Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.335112 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxj6f\" (UniqueName: \"kubernetes.io/projected/0859609b-785d-4353-beed-7d3fc7aed086-kube-api-access-qxj6f\") pod \"keystone-cron-29402641-zj9mf\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.335277 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-fernet-keys\") pod \"keystone-cron-29402641-zj9mf\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.335546 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-config-data\") pod \"keystone-cron-29402641-zj9mf\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.336067 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-combined-ca-bundle\") pod \"keystone-cron-29402641-zj9mf\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.439114 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-combined-ca-bundle\") pod \"keystone-cron-29402641-zj9mf\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.439177 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxj6f\" (UniqueName: \"kubernetes.io/projected/0859609b-785d-4353-beed-7d3fc7aed086-kube-api-access-qxj6f\") pod \"keystone-cron-29402641-zj9mf\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.439267 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-fernet-keys\") pod \"keystone-cron-29402641-zj9mf\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.439325 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-config-data\") pod \"keystone-cron-29402641-zj9mf\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.446531 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-fernet-keys\") pod \"keystone-cron-29402641-zj9mf\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.448403 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-combined-ca-bundle\") pod \"keystone-cron-29402641-zj9mf\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.454838 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-config-data\") pod \"keystone-cron-29402641-zj9mf\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.461712 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxj6f\" (UniqueName: \"kubernetes.io/projected/0859609b-785d-4353-beed-7d3fc7aed086-kube-api-access-qxj6f\") pod \"keystone-cron-29402641-zj9mf\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.473981 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:00 crc kubenswrapper[4622]: I1126 12:01:00.887284 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29402641-zj9mf"] Nov 26 12:01:01 crc kubenswrapper[4622]: I1126 12:01:01.087595 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29402641-zj9mf" event={"ID":"0859609b-785d-4353-beed-7d3fc7aed086","Type":"ContainerStarted","Data":"eddd7628f314c30b8ec985b5f7dcfcecebb8dc2f4ac9d4275453aec958f27150"} Nov 26 12:01:01 crc kubenswrapper[4622]: I1126 12:01:01.087881 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29402641-zj9mf" event={"ID":"0859609b-785d-4353-beed-7d3fc7aed086","Type":"ContainerStarted","Data":"c528b6c4b69053081ce26f8868cc45343e94b31887cc496dabd7bf59cdc264e7"} Nov 26 12:01:01 crc kubenswrapper[4622]: I1126 12:01:01.108434 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29402641-zj9mf" podStartSLOduration=1.108421392 podStartE2EDuration="1.108421392s" podCreationTimestamp="2025-11-26 12:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-26 12:01:01.100791595 +0000 UTC m=+3020.692003117" watchObservedRunningTime="2025-11-26 12:01:01.108421392 +0000 UTC m=+3020.699632914" Nov 26 12:01:02 crc kubenswrapper[4622]: I1126 12:01:02.779954 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-hv4z2_c7ab3421-7274-4a71-b54a-dc4955028478/cert-manager-controller/0.log" Nov 26 12:01:02 crc kubenswrapper[4622]: I1126 12:01:02.946611 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-tcw8n_34de43db-05eb-494e-a5a2-f0a4979dabb5/cert-manager-cainjector/0.log" Nov 26 12:01:02 crc kubenswrapper[4622]: I1126 12:01:02.981159 4622 log.go:25] "Finished parsing log file" 
path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-fjmks_b4640717-d4ad-4231-aef4-273e00c48ec2/cert-manager-webhook/0.log" Nov 26 12:01:03 crc kubenswrapper[4622]: I1126 12:01:03.102716 4622 generic.go:334] "Generic (PLEG): container finished" podID="0859609b-785d-4353-beed-7d3fc7aed086" containerID="eddd7628f314c30b8ec985b5f7dcfcecebb8dc2f4ac9d4275453aec958f27150" exitCode=0 Nov 26 12:01:03 crc kubenswrapper[4622]: I1126 12:01:03.102762 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29402641-zj9mf" event={"ID":"0859609b-785d-4353-beed-7d3fc7aed086","Type":"ContainerDied","Data":"eddd7628f314c30b8ec985b5f7dcfcecebb8dc2f4ac9d4275453aec958f27150"} Nov 26 12:01:03 crc kubenswrapper[4622]: I1126 12:01:03.706683 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 12:01:03 crc kubenswrapper[4622]: E1126 12:01:03.707287 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 12:01:04 crc kubenswrapper[4622]: I1126 12:01:04.408001 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:04 crc kubenswrapper[4622]: I1126 12:01:04.537562 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-config-data\") pod \"0859609b-785d-4353-beed-7d3fc7aed086\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " Nov 26 12:01:04 crc kubenswrapper[4622]: I1126 12:01:04.537679 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-combined-ca-bundle\") pod \"0859609b-785d-4353-beed-7d3fc7aed086\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " Nov 26 12:01:04 crc kubenswrapper[4622]: I1126 12:01:04.537835 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxj6f\" (UniqueName: \"kubernetes.io/projected/0859609b-785d-4353-beed-7d3fc7aed086-kube-api-access-qxj6f\") pod \"0859609b-785d-4353-beed-7d3fc7aed086\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " Nov 26 12:01:04 crc kubenswrapper[4622]: I1126 12:01:04.537884 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-fernet-keys\") pod \"0859609b-785d-4353-beed-7d3fc7aed086\" (UID: \"0859609b-785d-4353-beed-7d3fc7aed086\") " Nov 26 12:01:04 crc kubenswrapper[4622]: I1126 12:01:04.546335 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0859609b-785d-4353-beed-7d3fc7aed086-kube-api-access-qxj6f" (OuterVolumeSpecName: "kube-api-access-qxj6f") pod "0859609b-785d-4353-beed-7d3fc7aed086" (UID: "0859609b-785d-4353-beed-7d3fc7aed086"). InnerVolumeSpecName "kube-api-access-qxj6f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 12:01:04 crc kubenswrapper[4622]: I1126 12:01:04.546699 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "0859609b-785d-4353-beed-7d3fc7aed086" (UID: "0859609b-785d-4353-beed-7d3fc7aed086"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 12:01:04 crc kubenswrapper[4622]: I1126 12:01:04.569248 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0859609b-785d-4353-beed-7d3fc7aed086" (UID: "0859609b-785d-4353-beed-7d3fc7aed086"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 12:01:04 crc kubenswrapper[4622]: I1126 12:01:04.589381 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-config-data" (OuterVolumeSpecName: "config-data") pod "0859609b-785d-4353-beed-7d3fc7aed086" (UID: "0859609b-785d-4353-beed-7d3fc7aed086"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 26 12:01:04 crc kubenswrapper[4622]: I1126 12:01:04.640845 4622 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-config-data\") on node \"crc\" DevicePath \"\"" Nov 26 12:01:04 crc kubenswrapper[4622]: I1126 12:01:04.640891 4622 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 26 12:01:04 crc kubenswrapper[4622]: I1126 12:01:04.640916 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxj6f\" (UniqueName: \"kubernetes.io/projected/0859609b-785d-4353-beed-7d3fc7aed086-kube-api-access-qxj6f\") on node \"crc\" DevicePath \"\"" Nov 26 12:01:04 crc kubenswrapper[4622]: I1126 12:01:04.640930 4622 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0859609b-785d-4353-beed-7d3fc7aed086-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 26 12:01:05 crc kubenswrapper[4622]: I1126 12:01:05.124183 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29402641-zj9mf" event={"ID":"0859609b-785d-4353-beed-7d3fc7aed086","Type":"ContainerDied","Data":"c528b6c4b69053081ce26f8868cc45343e94b31887cc496dabd7bf59cdc264e7"} Nov 26 12:01:05 crc kubenswrapper[4622]: I1126 12:01:05.124833 4622 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c528b6c4b69053081ce26f8868cc45343e94b31887cc496dabd7bf59cdc264e7" Nov 26 12:01:05 crc kubenswrapper[4622]: I1126 12:01:05.124255 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29402641-zj9mf" Nov 26 12:01:13 crc kubenswrapper[4622]: I1126 12:01:13.276244 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-5874bd7bc5-8zb8q_0f2b214a-5b02-4ad5-906f-eda41d105d39/nmstate-console-plugin/0.log" Nov 26 12:01:13 crc kubenswrapper[4622]: I1126 12:01:13.443612 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-56hb5_42a26eeb-75bc-4cf4-80d4-c448ba54cf53/nmstate-handler/0.log" Nov 26 12:01:13 crc kubenswrapper[4622]: I1126 12:01:13.476829 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-5dcf9c57c5-xxnmq_4a4797ef-0155-4c03-af71-90ad8b7bb9ce/nmstate-metrics/0.log" Nov 26 12:01:13 crc kubenswrapper[4622]: I1126 12:01:13.478946 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-5dcf9c57c5-xxnmq_4a4797ef-0155-4c03-af71-90ad8b7bb9ce/kube-rbac-proxy/0.log" Nov 26 12:01:13 crc kubenswrapper[4622]: I1126 12:01:13.619838 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-557fdffb88-w28b4_524f0fb3-a92d-4e3e-b07c-8d7ee8556d05/nmstate-operator/0.log" Nov 26 12:01:13 crc kubenswrapper[4622]: I1126 12:01:13.652540 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6b89b748d8-4rmtm_57e7baad-e597-4021-8a23-961efc666e1d/nmstate-webhook/0.log" Nov 26 12:01:15 crc kubenswrapper[4622]: I1126 12:01:15.706401 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 12:01:15 crc kubenswrapper[4622]: E1126 12:01:15.706960 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 12:01:24 crc kubenswrapper[4622]: I1126 12:01:24.958626 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-t4v8v_501feec2-41da-496a-917e-7c032d616625/kube-rbac-proxy/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.077010 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-t4v8v_501feec2-41da-496a-917e-7c032d616625/controller/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.140874 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/cp-frr-files/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.309201 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/cp-reloader/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.310057 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/cp-frr-files/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.340381 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/cp-reloader/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.353539 4622 
log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/cp-metrics/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.495277 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/cp-frr-files/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.514981 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/cp-reloader/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.517037 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/cp-metrics/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.519261 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/cp-metrics/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.687463 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/cp-frr-files/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.690514 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/cp-metrics/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.695635 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/controller/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.713218 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/cp-reloader/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.831398 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/frr-metrics/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.875051 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/kube-rbac-proxy/0.log" Nov 26 12:01:25 crc kubenswrapper[4622]: I1126 12:01:25.916062 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/kube-rbac-proxy-frr/0.log" Nov 26 12:01:26 crc kubenswrapper[4622]: I1126 12:01:26.061427 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/reloader/0.log" Nov 26 12:01:26 crc kubenswrapper[4622]: I1126 12:01:26.081824 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-6998585d5-gtbw2_ca0f25fe-37e0-4346-9fcc-96152cb67a78/frr-k8s-webhook-server/0.log" Nov 26 12:01:26 crc kubenswrapper[4622]: I1126 12:01:26.274451 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-574b75b4b5-vqg2q_70d2aa06-6937-4f5c-9e75-17a777eb7e78/manager/0.log" Nov 26 12:01:26 crc kubenswrapper[4622]: I1126 12:01:26.446213 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5765679bfc-p7wlr_1b655923-a88c-49ea-938e-bff6999e007a/webhook-server/0.log" Nov 26 12:01:26 crc kubenswrapper[4622]: I1126 12:01:26.492444 4622 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-wb55x_9c03ee24-a489-4550-9f60-c1839cf29670/kube-rbac-proxy/0.log" Nov 26 12:01:26 crc kubenswrapper[4622]: I1126 12:01:26.980897 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-wb55x_9c03ee24-a489-4550-9f60-c1839cf29670/speaker/0.log" Nov 26 12:01:27 crc kubenswrapper[4622]: I1126 12:01:27.143106 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fts9w_a6cf6aa2-a4e9-4d0b-8fd9-165e76607d59/frr/0.log" Nov 26 12:01:28 crc kubenswrapper[4622]: I1126 12:01:28.707247 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 12:01:28 crc kubenswrapper[4622]: E1126 12:01:28.707743 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 12:01:35 crc kubenswrapper[4622]: I1126 12:01:35.919889 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm_b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d/util/0.log" Nov 26 12:01:36 crc kubenswrapper[4622]: I1126 12:01:36.067594 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm_b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d/pull/0.log" Nov 26 12:01:36 crc kubenswrapper[4622]: I1126 12:01:36.070356 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm_b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d/pull/0.log" Nov 26 12:01:36 crc kubenswrapper[4622]: I1126 12:01:36.076390 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm_b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d/util/0.log" Nov 26 12:01:36 crc kubenswrapper[4622]: I1126 12:01:36.236126 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm_b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d/util/0.log" Nov 26 12:01:36 crc kubenswrapper[4622]: I1126 12:01:36.256697 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm_b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d/extract/0.log" Nov 26 12:01:36 crc kubenswrapper[4622]: I1126 12:01:36.274879 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772e6zdpm_b4f1470f-5ed2-4ee3-a3c3-21da8791ef2d/pull/0.log" Nov 26 12:01:36 crc kubenswrapper[4622]: I1126 12:01:36.371644 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pvq22_33de4658-ea3b-4ebe-bec1-ed6190d6f2c2/extract-utilities/0.log" Nov 26 12:01:36 crc kubenswrapper[4622]: I1126 12:01:36.512620 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pvq22_33de4658-ea3b-4ebe-bec1-ed6190d6f2c2/extract-utilities/0.log" Nov 26 12:01:36 crc kubenswrapper[4622]: I1126 
12:01:36.529408 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pvq22_33de4658-ea3b-4ebe-bec1-ed6190d6f2c2/extract-content/0.log" Nov 26 12:01:36 crc kubenswrapper[4622]: I1126 12:01:36.532022 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pvq22_33de4658-ea3b-4ebe-bec1-ed6190d6f2c2/extract-content/0.log" Nov 26 12:01:36 crc kubenswrapper[4622]: I1126 12:01:36.664712 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pvq22_33de4658-ea3b-4ebe-bec1-ed6190d6f2c2/extract-utilities/0.log" Nov 26 12:01:36 crc kubenswrapper[4622]: I1126 12:01:36.664853 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pvq22_33de4658-ea3b-4ebe-bec1-ed6190d6f2c2/extract-content/0.log" Nov 26 12:01:36 crc kubenswrapper[4622]: I1126 12:01:36.851573 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hlg7h_baf7a2ad-d38e-4569-ad12-d346fc0abfed/extract-utilities/0.log" Nov 26 12:01:36 crc kubenswrapper[4622]: I1126 12:01:36.992211 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pvq22_33de4658-ea3b-4ebe-bec1-ed6190d6f2c2/registry-server/0.log" Nov 26 12:01:37 crc kubenswrapper[4622]: I1126 12:01:37.008462 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hlg7h_baf7a2ad-d38e-4569-ad12-d346fc0abfed/extract-utilities/0.log" Nov 26 12:01:37 crc kubenswrapper[4622]: I1126 12:01:37.018578 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hlg7h_baf7a2ad-d38e-4569-ad12-d346fc0abfed/extract-content/0.log" Nov 26 12:01:37 crc kubenswrapper[4622]: I1126 12:01:37.077584 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hlg7h_baf7a2ad-d38e-4569-ad12-d346fc0abfed/extract-content/0.log" Nov 26 12:01:37 crc kubenswrapper[4622]: I1126 12:01:37.176324 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hlg7h_baf7a2ad-d38e-4569-ad12-d346fc0abfed/extract-utilities/0.log" Nov 26 12:01:37 crc kubenswrapper[4622]: I1126 12:01:37.202565 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hlg7h_baf7a2ad-d38e-4569-ad12-d346fc0abfed/extract-content/0.log" Nov 26 12:01:37 crc kubenswrapper[4622]: I1126 12:01:37.362187 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k_ea5f56ee-1066-4bb3-b055-1550d4fd9fd8/util/0.log" Nov 26 12:01:37 crc kubenswrapper[4622]: I1126 12:01:37.583832 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k_ea5f56ee-1066-4bb3-b055-1550d4fd9fd8/util/0.log" Nov 26 12:01:37 crc kubenswrapper[4622]: I1126 12:01:37.589939 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k_ea5f56ee-1066-4bb3-b055-1550d4fd9fd8/pull/0.log" Nov 26 12:01:37 crc kubenswrapper[4622]: I1126 12:01:37.633900 4622 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k_ea5f56ee-1066-4bb3-b055-1550d4fd9fd8/pull/0.log" Nov 26 12:01:37 crc kubenswrapper[4622]: I1126 12:01:37.657007 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hlg7h_baf7a2ad-d38e-4569-ad12-d346fc0abfed/registry-server/0.log" Nov 26 12:01:37 crc kubenswrapper[4622]: I1126 12:01:37.761610 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k_ea5f56ee-1066-4bb3-b055-1550d4fd9fd8/util/0.log" Nov 26 12:01:37 crc kubenswrapper[4622]: I1126 12:01:37.787000 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k_ea5f56ee-1066-4bb3-b055-1550d4fd9fd8/pull/0.log" Nov 26 12:01:37 crc kubenswrapper[4622]: I1126 12:01:37.811695 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6fcq4k_ea5f56ee-1066-4bb3-b055-1550d4fd9fd8/extract/0.log" Nov 26 12:01:37 crc kubenswrapper[4622]: I1126 12:01:37.903086 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-f7zqw_f2df0e8a-78e0-4d5a-8d31-e7744df07db7/marketplace-operator/0.log" Nov 26 12:01:37 crc kubenswrapper[4622]: I1126 12:01:37.960045 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-t8s85_37486221-c1f5-4a72-9923-34c65efb3a0f/extract-utilities/0.log" Nov 26 12:01:38 crc kubenswrapper[4622]: I1126 12:01:38.121917 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-t8s85_37486221-c1f5-4a72-9923-34c65efb3a0f/extract-content/0.log" Nov 26 12:01:38 crc kubenswrapper[4622]: I1126 12:01:38.121918 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-t8s85_37486221-c1f5-4a72-9923-34c65efb3a0f/extract-utilities/0.log" Nov 26 12:01:38 crc kubenswrapper[4622]: I1126 12:01:38.145073 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-t8s85_37486221-c1f5-4a72-9923-34c65efb3a0f/extract-content/0.log" Nov 26 12:01:38 crc kubenswrapper[4622]: I1126 12:01:38.292425 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-t8s85_37486221-c1f5-4a72-9923-34c65efb3a0f/extract-content/0.log" Nov 26 12:01:38 crc kubenswrapper[4622]: I1126 12:01:38.316211 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-t8s85_37486221-c1f5-4a72-9923-34c65efb3a0f/extract-utilities/0.log" Nov 26 12:01:38 crc kubenswrapper[4622]: I1126 12:01:38.413434 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-t8s85_37486221-c1f5-4a72-9923-34c65efb3a0f/registry-server/0.log" Nov 26 12:01:38 crc kubenswrapper[4622]: I1126 12:01:38.442073 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f7b64_2dda58d5-346a-4eeb-9942-38221a073d22/extract-utilities/0.log" Nov 26 12:01:38 crc kubenswrapper[4622]: I1126 12:01:38.594496 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f7b64_2dda58d5-346a-4eeb-9942-38221a073d22/extract-utilities/0.log" Nov 26 
12:01:38 crc kubenswrapper[4622]: I1126 12:01:38.595664 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f7b64_2dda58d5-346a-4eeb-9942-38221a073d22/extract-content/0.log" Nov 26 12:01:38 crc kubenswrapper[4622]: I1126 12:01:38.611421 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f7b64_2dda58d5-346a-4eeb-9942-38221a073d22/extract-content/0.log" Nov 26 12:01:38 crc kubenswrapper[4622]: I1126 12:01:38.765636 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f7b64_2dda58d5-346a-4eeb-9942-38221a073d22/extract-content/0.log" Nov 26 12:01:38 crc kubenswrapper[4622]: I1126 12:01:38.781434 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f7b64_2dda58d5-346a-4eeb-9942-38221a073d22/extract-utilities/0.log" Nov 26 12:01:39 crc kubenswrapper[4622]: I1126 12:01:39.056676 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f7b64_2dda58d5-346a-4eeb-9942-38221a073d22/registry-server/0.log" Nov 26 12:01:42 crc kubenswrapper[4622]: I1126 12:01:42.708031 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 12:01:42 crc kubenswrapper[4622]: E1126 12:01:42.709104 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 12:01:51 crc kubenswrapper[4622]: E1126 12:01:51.712601 4622 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \"http://38.102.83.113:5001/v2/\": dial tcp 38.102.83.113:5001: i/o timeout" image="38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1" Nov 26 12:01:51 crc kubenswrapper[4622]: E1126 12:01:51.713320 4622 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \"http://38.102.83.113:5001/v2/\": dial tcp 38.102.83.113:5001: i/o timeout" image="38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1" Nov 26 12:01:51 crc kubenswrapper[4622]: E1126 12:01:51.713555 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p446s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-76b59d6455-m8xbs_openstack-operators(3b8a350c-99de-4c01-ae25-cf10b5247f9a): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \"http://38.102.83.113:5001/v2/\": dial tcp 38.102.83.113:5001: i/o timeout" logger="UnhandledError" Nov 26 12:01:51 crc kubenswrapper[4622]: E1126 12:01:51.714693 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \\\"http://38.102.83.113:5001/v2/\\\": dial tcp 38.102.83.113:5001: i/o timeout\"" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" podUID="3b8a350c-99de-4c01-ae25-cf10b5247f9a" Nov 26 12:01:53 crc kubenswrapper[4622]: I1126 12:01:53.706266 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 12:01:53 crc kubenswrapper[4622]: E1126 12:01:53.707151 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 12:02:02 crc kubenswrapper[4622]: E1126 12:02:02.710069 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1\\\"\"" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" podUID="3b8a350c-99de-4c01-ae25-cf10b5247f9a" Nov 26 12:02:08 crc kubenswrapper[4622]: I1126 12:02:08.706204 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 12:02:08 crc kubenswrapper[4622]: E1126 12:02:08.707034 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 12:02:17 crc kubenswrapper[4622]: E1126 12:02:17.708137 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1\\\"\"" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" podUID="3b8a350c-99de-4c01-ae25-cf10b5247f9a" Nov 26 12:02:20 crc kubenswrapper[4622]: I1126 12:02:20.713265 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 12:02:20 crc kubenswrapper[4622]: E1126 12:02:20.713776 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 12:02:28 crc kubenswrapper[4622]: E1126 12:02:28.708922 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1\\\"\"" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" podUID="3b8a350c-99de-4c01-ae25-cf10b5247f9a" Nov 26 12:02:33 crc kubenswrapper[4622]: I1126 12:02:33.063822 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-7kxg5"] Nov 26 12:02:33 crc kubenswrapper[4622]: I1126 12:02:33.074706 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-7kxg5"] Nov 26 12:02:34 crc kubenswrapper[4622]: I1126 12:02:34.036496 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-9ad5-account-create-update-sg5r5"] Nov 26 12:02:34 crc kubenswrapper[4622]: I1126 12:02:34.043154 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/manila-9ad5-account-create-update-sg5r5"] Nov 26 12:02:34 crc kubenswrapper[4622]: I1126 12:02:34.716437 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b956c55-fa7f-474e-a13a-a410f68ce795" path="/var/lib/kubelet/pods/2b956c55-fa7f-474e-a13a-a410f68ce795/volumes" Nov 26 12:02:34 crc kubenswrapper[4622]: I1126 12:02:34.717671 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a36f9d11-1586-4876-afeb-a197d5479f7c" path="/var/lib/kubelet/pods/a36f9d11-1586-4876-afeb-a197d5479f7c/volumes" Nov 26 12:02:35 crc kubenswrapper[4622]: I1126 12:02:35.706009 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 12:02:35 crc kubenswrapper[4622]: E1126 12:02:35.706403 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 12:02:45 crc kubenswrapper[4622]: I1126 12:02:45.739112 4622 scope.go:117] "RemoveContainer" containerID="2eed1bb67ac4da101e35ac2f5d3ac53bba65cfb9ffb791ec07827d2a7a3038e8" Nov 26 12:02:45 crc kubenswrapper[4622]: I1126 12:02:45.756420 4622 scope.go:117] "RemoveContainer" containerID="cc98ab4c4c58f1442afda7cfe63b3f049bad159cd4f14eda5844ec95e451f08f" Nov 26 12:02:47 crc kubenswrapper[4622]: I1126 12:02:47.038436 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-mq94p"] Nov 26 12:02:47 crc kubenswrapper[4622]: I1126 12:02:47.046608 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-mq94p"] Nov 26 12:02:48 crc kubenswrapper[4622]: I1126 12:02:48.714420 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae6e0966-f7e8-4460-97e1-2c9effc3f080" path="/var/lib/kubelet/pods/ae6e0966-f7e8-4460-97e1-2c9effc3f080/volumes" Nov 26 12:02:50 crc kubenswrapper[4622]: I1126 12:02:50.712285 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 12:02:50 crc kubenswrapper[4622]: E1126 12:02:50.712956 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 12:02:57 crc kubenswrapper[4622]: I1126 12:02:57.074102 4622 generic.go:334] "Generic (PLEG): container finished" podID="5ca03bf9-3a97-4510-bc3f-dd32381bb44d" containerID="6a8498608393b55e70e623680800716568c630c8be61ba58c60469bfa052c18e" exitCode=0 Nov 26 12:02:57 crc kubenswrapper[4622]: I1126 12:02:57.074184 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7khvt/must-gather-hbdnk" event={"ID":"5ca03bf9-3a97-4510-bc3f-dd32381bb44d","Type":"ContainerDied","Data":"6a8498608393b55e70e623680800716568c630c8be61ba58c60469bfa052c18e"} Nov 26 12:02:57 crc kubenswrapper[4622]: I1126 12:02:57.075439 4622 scope.go:117] "RemoveContainer" 
containerID="6a8498608393b55e70e623680800716568c630c8be61ba58c60469bfa052c18e" Nov 26 12:02:57 crc kubenswrapper[4622]: I1126 12:02:57.755287 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-7khvt_must-gather-hbdnk_5ca03bf9-3a97-4510-bc3f-dd32381bb44d/gather/0.log" Nov 26 12:03:02 crc kubenswrapper[4622]: I1126 12:03:02.706081 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 12:03:02 crc kubenswrapper[4622]: E1126 12:03:02.707027 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k565w_openshift-machine-config-operator(b4b2dbdb-8e61-40e9-86ae-3dba474c215b)\"" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" Nov 26 12:03:05 crc kubenswrapper[4622]: I1126 12:03:05.214467 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-7khvt/must-gather-hbdnk"] Nov 26 12:03:05 crc kubenswrapper[4622]: I1126 12:03:05.215126 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-7khvt/must-gather-hbdnk" podUID="5ca03bf9-3a97-4510-bc3f-dd32381bb44d" containerName="copy" containerID="cri-o://604db561c1eea3b32146bc500946cbbfcecaad07619673e0c002a388661c12d4" gracePeriod=2 Nov 26 12:03:05 crc kubenswrapper[4622]: I1126 12:03:05.229047 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-7khvt/must-gather-hbdnk"] Nov 26 12:03:05 crc kubenswrapper[4622]: I1126 12:03:05.581995 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-7khvt_must-gather-hbdnk_5ca03bf9-3a97-4510-bc3f-dd32381bb44d/copy/0.log" Nov 26 12:03:05 crc kubenswrapper[4622]: I1126 12:03:05.582380 4622 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-7khvt/must-gather-hbdnk" Nov 26 12:03:05 crc kubenswrapper[4622]: I1126 12:03:05.597025 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5ca03bf9-3a97-4510-bc3f-dd32381bb44d-must-gather-output\") pod \"5ca03bf9-3a97-4510-bc3f-dd32381bb44d\" (UID: \"5ca03bf9-3a97-4510-bc3f-dd32381bb44d\") " Nov 26 12:03:05 crc kubenswrapper[4622]: I1126 12:03:05.597354 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4x42w\" (UniqueName: \"kubernetes.io/projected/5ca03bf9-3a97-4510-bc3f-dd32381bb44d-kube-api-access-4x42w\") pod \"5ca03bf9-3a97-4510-bc3f-dd32381bb44d\" (UID: \"5ca03bf9-3a97-4510-bc3f-dd32381bb44d\") " Nov 26 12:03:05 crc kubenswrapper[4622]: I1126 12:03:05.603288 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ca03bf9-3a97-4510-bc3f-dd32381bb44d-kube-api-access-4x42w" (OuterVolumeSpecName: "kube-api-access-4x42w") pod "5ca03bf9-3a97-4510-bc3f-dd32381bb44d" (UID: "5ca03bf9-3a97-4510-bc3f-dd32381bb44d"). InnerVolumeSpecName "kube-api-access-4x42w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 12:03:05 crc kubenswrapper[4622]: I1126 12:03:05.702070 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4x42w\" (UniqueName: \"kubernetes.io/projected/5ca03bf9-3a97-4510-bc3f-dd32381bb44d-kube-api-access-4x42w\") on node \"crc\" DevicePath \"\"" Nov 26 12:03:05 crc kubenswrapper[4622]: I1126 12:03:05.725732 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ca03bf9-3a97-4510-bc3f-dd32381bb44d-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "5ca03bf9-3a97-4510-bc3f-dd32381bb44d" (UID: "5ca03bf9-3a97-4510-bc3f-dd32381bb44d"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 12:03:05 crc kubenswrapper[4622]: I1126 12:03:05.803949 4622 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5ca03bf9-3a97-4510-bc3f-dd32381bb44d-must-gather-output\") on node \"crc\" DevicePath \"\"" Nov 26 12:03:06 crc kubenswrapper[4622]: I1126 12:03:06.168675 4622 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-7khvt_must-gather-hbdnk_5ca03bf9-3a97-4510-bc3f-dd32381bb44d/copy/0.log" Nov 26 12:03:06 crc kubenswrapper[4622]: I1126 12:03:06.169265 4622 generic.go:334] "Generic (PLEG): container finished" podID="5ca03bf9-3a97-4510-bc3f-dd32381bb44d" containerID="604db561c1eea3b32146bc500946cbbfcecaad07619673e0c002a388661c12d4" exitCode=143 Nov 26 12:03:06 crc kubenswrapper[4622]: I1126 12:03:06.169326 4622 scope.go:117] "RemoveContainer" containerID="604db561c1eea3b32146bc500946cbbfcecaad07619673e0c002a388661c12d4" Nov 26 12:03:06 crc kubenswrapper[4622]: I1126 12:03:06.169402 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-7khvt/must-gather-hbdnk" Nov 26 12:03:06 crc kubenswrapper[4622]: I1126 12:03:06.185553 4622 scope.go:117] "RemoveContainer" containerID="6a8498608393b55e70e623680800716568c630c8be61ba58c60469bfa052c18e" Nov 26 12:03:06 crc kubenswrapper[4622]: I1126 12:03:06.212028 4622 scope.go:117] "RemoveContainer" containerID="604db561c1eea3b32146bc500946cbbfcecaad07619673e0c002a388661c12d4" Nov 26 12:03:06 crc kubenswrapper[4622]: E1126 12:03:06.212566 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"604db561c1eea3b32146bc500946cbbfcecaad07619673e0c002a388661c12d4\": container with ID starting with 604db561c1eea3b32146bc500946cbbfcecaad07619673e0c002a388661c12d4 not found: ID does not exist" containerID="604db561c1eea3b32146bc500946cbbfcecaad07619673e0c002a388661c12d4" Nov 26 12:03:06 crc kubenswrapper[4622]: I1126 12:03:06.212606 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"604db561c1eea3b32146bc500946cbbfcecaad07619673e0c002a388661c12d4"} err="failed to get container status \"604db561c1eea3b32146bc500946cbbfcecaad07619673e0c002a388661c12d4\": rpc error: code = NotFound desc = could not find container \"604db561c1eea3b32146bc500946cbbfcecaad07619673e0c002a388661c12d4\": container with ID starting with 604db561c1eea3b32146bc500946cbbfcecaad07619673e0c002a388661c12d4 not found: ID does not exist" Nov 26 12:03:06 crc kubenswrapper[4622]: I1126 12:03:06.212643 4622 scope.go:117] "RemoveContainer" containerID="6a8498608393b55e70e623680800716568c630c8be61ba58c60469bfa052c18e" Nov 26 12:03:06 crc kubenswrapper[4622]: E1126 12:03:06.213075 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a8498608393b55e70e623680800716568c630c8be61ba58c60469bfa052c18e\": container with ID starting with 6a8498608393b55e70e623680800716568c630c8be61ba58c60469bfa052c18e not found: ID does not exist" containerID="6a8498608393b55e70e623680800716568c630c8be61ba58c60469bfa052c18e" Nov 26 12:03:06 crc kubenswrapper[4622]: I1126 12:03:06.213124 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a8498608393b55e70e623680800716568c630c8be61ba58c60469bfa052c18e"} err="failed to get container status \"6a8498608393b55e70e623680800716568c630c8be61ba58c60469bfa052c18e\": rpc error: code = NotFound desc = could not find container \"6a8498608393b55e70e623680800716568c630c8be61ba58c60469bfa052c18e\": container with ID starting with 6a8498608393b55e70e623680800716568c630c8be61ba58c60469bfa052c18e not found: ID does not exist" Nov 26 12:03:06 crc kubenswrapper[4622]: I1126 12:03:06.717187 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ca03bf9-3a97-4510-bc3f-dd32381bb44d" path="/var/lib/kubelet/pods/5ca03bf9-3a97-4510-bc3f-dd32381bb44d/volumes" Nov 26 12:03:15 crc kubenswrapper[4622]: I1126 12:03:15.706497 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" Nov 26 12:03:16 crc kubenswrapper[4622]: I1126 12:03:16.270805 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"da4aac5b821c844f3555f6720416da1f018c03df13f144beb2dadf5623f3d044"} Nov 26 12:03:45 crc kubenswrapper[4622]: I1126 
12:03:45.826286 4622 scope.go:117] "RemoveContainer" containerID="bff967207a4747f826e61edde3a078c16e5ab51886aa1670e85f3d83552bb10e" Nov 26 12:04:41 crc kubenswrapper[4622]: E1126 12:04:41.714257 4622 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \"http://38.102.83.113:5001/v2/\": dial tcp 38.102.83.113:5001: i/o timeout" image="38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1" Nov 26 12:04:41 crc kubenswrapper[4622]: E1126 12:04:41.714800 4622 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \"http://38.102.83.113:5001/v2/\": dial tcp 38.102.83.113:5001: i/o timeout" image="38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1" Nov 26 12:04:41 crc kubenswrapper[4622]: E1126 12:04:41.714950 4622 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p446s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
test-operator-controller-manager-76b59d6455-m8xbs_openstack-operators(3b8a350c-99de-4c01-ae25-cf10b5247f9a): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \"http://38.102.83.113:5001/v2/\": dial tcp 38.102.83.113:5001: i/o timeout" logger="UnhandledError" Nov 26 12:04:41 crc kubenswrapper[4622]: E1126 12:04:41.716048 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1: pinging container registry 38.102.83.113:5001: Get \\\"http://38.102.83.113:5001/v2/\\\": dial tcp 38.102.83.113:5001: i/o timeout\"" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" podUID="3b8a350c-99de-4c01-ae25-cf10b5247f9a" Nov 26 12:04:56 crc kubenswrapper[4622]: E1126 12:04:56.709655 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1\\\"\"" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" podUID="3b8a350c-99de-4c01-ae25-cf10b5247f9a" Nov 26 12:05:10 crc kubenswrapper[4622]: E1126 12:05:10.716258 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1\\\"\"" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" podUID="3b8a350c-99de-4c01-ae25-cf10b5247f9a" Nov 26 12:05:15 crc kubenswrapper[4622]: I1126 12:05:15.199378 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 12:05:15 crc kubenswrapper[4622]: I1126 12:05:15.200160 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.195190 4622 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zdnnw"] Nov 26 12:05:21 crc kubenswrapper[4622]: E1126 12:05:21.196104 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ca03bf9-3a97-4510-bc3f-dd32381bb44d" containerName="copy" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.196120 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ca03bf9-3a97-4510-bc3f-dd32381bb44d" containerName="copy" Nov 26 12:05:21 crc kubenswrapper[4622]: E1126 12:05:21.196135 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0859609b-785d-4353-beed-7d3fc7aed086" containerName="keystone-cron" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.196141 4622 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="0859609b-785d-4353-beed-7d3fc7aed086" containerName="keystone-cron" Nov 26 12:05:21 crc kubenswrapper[4622]: E1126 12:05:21.196168 4622 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ca03bf9-3a97-4510-bc3f-dd32381bb44d" containerName="gather" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.196175 4622 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ca03bf9-3a97-4510-bc3f-dd32381bb44d" containerName="gather" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.196391 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ca03bf9-3a97-4510-bc3f-dd32381bb44d" containerName="gather" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.196415 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="0859609b-785d-4353-beed-7d3fc7aed086" containerName="keystone-cron" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.196432 4622 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ca03bf9-3a97-4510-bc3f-dd32381bb44d" containerName="copy" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.197908 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.209930 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zdnnw"] Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.241202 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3f4de5a-89ab-4065-b962-df32521bc9b6-utilities\") pod \"redhat-operators-zdnnw\" (UID: \"c3f4de5a-89ab-4065-b962-df32521bc9b6\") " pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.241263 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkggs\" (UniqueName: \"kubernetes.io/projected/c3f4de5a-89ab-4065-b962-df32521bc9b6-kube-api-access-xkggs\") pod \"redhat-operators-zdnnw\" (UID: \"c3f4de5a-89ab-4065-b962-df32521bc9b6\") " pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.241390 4622 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3f4de5a-89ab-4065-b962-df32521bc9b6-catalog-content\") pod \"redhat-operators-zdnnw\" (UID: \"c3f4de5a-89ab-4065-b962-df32521bc9b6\") " pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.343597 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3f4de5a-89ab-4065-b962-df32521bc9b6-catalog-content\") pod \"redhat-operators-zdnnw\" (UID: \"c3f4de5a-89ab-4065-b962-df32521bc9b6\") " pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.343729 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3f4de5a-89ab-4065-b962-df32521bc9b6-utilities\") pod \"redhat-operators-zdnnw\" (UID: \"c3f4de5a-89ab-4065-b962-df32521bc9b6\") " pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.343754 4622 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-xkggs\" (UniqueName: \"kubernetes.io/projected/c3f4de5a-89ab-4065-b962-df32521bc9b6-kube-api-access-xkggs\") pod \"redhat-operators-zdnnw\" (UID: \"c3f4de5a-89ab-4065-b962-df32521bc9b6\") " pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.344156 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3f4de5a-89ab-4065-b962-df32521bc9b6-catalog-content\") pod \"redhat-operators-zdnnw\" (UID: \"c3f4de5a-89ab-4065-b962-df32521bc9b6\") " pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.344193 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3f4de5a-89ab-4065-b962-df32521bc9b6-utilities\") pod \"redhat-operators-zdnnw\" (UID: \"c3f4de5a-89ab-4065-b962-df32521bc9b6\") " pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.364676 4622 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkggs\" (UniqueName: \"kubernetes.io/projected/c3f4de5a-89ab-4065-b962-df32521bc9b6-kube-api-access-xkggs\") pod \"redhat-operators-zdnnw\" (UID: \"c3f4de5a-89ab-4065-b962-df32521bc9b6\") " pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.517393 4622 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:21 crc kubenswrapper[4622]: I1126 12:05:21.949510 4622 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zdnnw"] Nov 26 12:05:22 crc kubenswrapper[4622]: I1126 12:05:22.538880 4622 generic.go:334] "Generic (PLEG): container finished" podID="c3f4de5a-89ab-4065-b962-df32521bc9b6" containerID="1561b233d6b4888757cb719afc5818c2da7e94b475394f5c0143d519776f3ef9" exitCode=0 Nov 26 12:05:22 crc kubenswrapper[4622]: I1126 12:05:22.538923 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zdnnw" event={"ID":"c3f4de5a-89ab-4065-b962-df32521bc9b6","Type":"ContainerDied","Data":"1561b233d6b4888757cb719afc5818c2da7e94b475394f5c0143d519776f3ef9"} Nov 26 12:05:22 crc kubenswrapper[4622]: I1126 12:05:22.539197 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zdnnw" event={"ID":"c3f4de5a-89ab-4065-b962-df32521bc9b6","Type":"ContainerStarted","Data":"cac3bd3061ec01c62a3ea1a73521b62163ba664136aef3ed9cebe8d2b1daa6f5"} Nov 26 12:05:22 crc kubenswrapper[4622]: I1126 12:05:22.540782 4622 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 26 12:05:22 crc kubenswrapper[4622]: E1126 12:05:22.708807 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1\\\"\"" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" podUID="3b8a350c-99de-4c01-ae25-cf10b5247f9a" Nov 26 12:05:24 crc kubenswrapper[4622]: I1126 12:05:24.558085 4622 generic.go:334] "Generic (PLEG): container finished" podID="c3f4de5a-89ab-4065-b962-df32521bc9b6" containerID="7464dc5b7870ca21a3deb1e53c02d8f75f735c70eb721746dfd8537311b51f34" exitCode=0 Nov 26 12:05:24 
crc kubenswrapper[4622]: I1126 12:05:24.558142 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zdnnw" event={"ID":"c3f4de5a-89ab-4065-b962-df32521bc9b6","Type":"ContainerDied","Data":"7464dc5b7870ca21a3deb1e53c02d8f75f735c70eb721746dfd8537311b51f34"} Nov 26 12:05:25 crc kubenswrapper[4622]: I1126 12:05:25.569490 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zdnnw" event={"ID":"c3f4de5a-89ab-4065-b962-df32521bc9b6","Type":"ContainerStarted","Data":"7741888e73625636f7b1837e2026b7bcbee22cbe9198e5f27a45544ee7cfb2eb"} Nov 26 12:05:25 crc kubenswrapper[4622]: I1126 12:05:25.595966 4622 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zdnnw" podStartSLOduration=1.85495222 podStartE2EDuration="4.595949013s" podCreationTimestamp="2025-11-26 12:05:21 +0000 UTC" firstStartedPulling="2025-11-26 12:05:22.540581406 +0000 UTC m=+3282.131792928" lastFinishedPulling="2025-11-26 12:05:25.281578199 +0000 UTC m=+3284.872789721" observedRunningTime="2025-11-26 12:05:25.588091511 +0000 UTC m=+3285.179303034" watchObservedRunningTime="2025-11-26 12:05:25.595949013 +0000 UTC m=+3285.187160534" Nov 26 12:05:31 crc kubenswrapper[4622]: I1126 12:05:31.518371 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:31 crc kubenswrapper[4622]: I1126 12:05:31.518924 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:31 crc kubenswrapper[4622]: I1126 12:05:31.560142 4622 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:31 crc kubenswrapper[4622]: I1126 12:05:31.663576 4622 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:31 crc kubenswrapper[4622]: I1126 12:05:31.791031 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zdnnw"] Nov 26 12:05:33 crc kubenswrapper[4622]: I1126 12:05:33.647955 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zdnnw" podUID="c3f4de5a-89ab-4065-b962-df32521bc9b6" containerName="registry-server" containerID="cri-o://7741888e73625636f7b1837e2026b7bcbee22cbe9198e5f27a45544ee7cfb2eb" gracePeriod=2 Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.056173 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.168885 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkggs\" (UniqueName: \"kubernetes.io/projected/c3f4de5a-89ab-4065-b962-df32521bc9b6-kube-api-access-xkggs\") pod \"c3f4de5a-89ab-4065-b962-df32521bc9b6\" (UID: \"c3f4de5a-89ab-4065-b962-df32521bc9b6\") " Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.168999 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3f4de5a-89ab-4065-b962-df32521bc9b6-catalog-content\") pod \"c3f4de5a-89ab-4065-b962-df32521bc9b6\" (UID: \"c3f4de5a-89ab-4065-b962-df32521bc9b6\") " Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.169425 4622 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3f4de5a-89ab-4065-b962-df32521bc9b6-utilities\") pod \"c3f4de5a-89ab-4065-b962-df32521bc9b6\" (UID: \"c3f4de5a-89ab-4065-b962-df32521bc9b6\") " Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.170207 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3f4de5a-89ab-4065-b962-df32521bc9b6-utilities" (OuterVolumeSpecName: "utilities") pod "c3f4de5a-89ab-4065-b962-df32521bc9b6" (UID: "c3f4de5a-89ab-4065-b962-df32521bc9b6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.174695 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3f4de5a-89ab-4065-b962-df32521bc9b6-kube-api-access-xkggs" (OuterVolumeSpecName: "kube-api-access-xkggs") pod "c3f4de5a-89ab-4065-b962-df32521bc9b6" (UID: "c3f4de5a-89ab-4065-b962-df32521bc9b6"). InnerVolumeSpecName "kube-api-access-xkggs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.272663 4622 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3f4de5a-89ab-4065-b962-df32521bc9b6-utilities\") on node \"crc\" DevicePath \"\"" Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.272697 4622 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkggs\" (UniqueName: \"kubernetes.io/projected/c3f4de5a-89ab-4065-b962-df32521bc9b6-kube-api-access-xkggs\") on node \"crc\" DevicePath \"\"" Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.661811 4622 generic.go:334] "Generic (PLEG): container finished" podID="c3f4de5a-89ab-4065-b962-df32521bc9b6" containerID="7741888e73625636f7b1837e2026b7bcbee22cbe9198e5f27a45544ee7cfb2eb" exitCode=0 Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.661913 4622 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zdnnw" Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.662181 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zdnnw" event={"ID":"c3f4de5a-89ab-4065-b962-df32521bc9b6","Type":"ContainerDied","Data":"7741888e73625636f7b1837e2026b7bcbee22cbe9198e5f27a45544ee7cfb2eb"} Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.662226 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zdnnw" event={"ID":"c3f4de5a-89ab-4065-b962-df32521bc9b6","Type":"ContainerDied","Data":"cac3bd3061ec01c62a3ea1a73521b62163ba664136aef3ed9cebe8d2b1daa6f5"} Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.662285 4622 scope.go:117] "RemoveContainer" containerID="7741888e73625636f7b1837e2026b7bcbee22cbe9198e5f27a45544ee7cfb2eb" Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.682063 4622 scope.go:117] "RemoveContainer" containerID="7464dc5b7870ca21a3deb1e53c02d8f75f735c70eb721746dfd8537311b51f34" Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.707685 4622 scope.go:117] "RemoveContainer" containerID="1561b233d6b4888757cb719afc5818c2da7e94b475394f5c0143d519776f3ef9" Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.747212 4622 scope.go:117] "RemoveContainer" containerID="7741888e73625636f7b1837e2026b7bcbee22cbe9198e5f27a45544ee7cfb2eb" Nov 26 12:05:34 crc kubenswrapper[4622]: E1126 12:05:34.747767 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7741888e73625636f7b1837e2026b7bcbee22cbe9198e5f27a45544ee7cfb2eb\": container with ID starting with 7741888e73625636f7b1837e2026b7bcbee22cbe9198e5f27a45544ee7cfb2eb not found: ID does not exist" containerID="7741888e73625636f7b1837e2026b7bcbee22cbe9198e5f27a45544ee7cfb2eb" Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.747830 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7741888e73625636f7b1837e2026b7bcbee22cbe9198e5f27a45544ee7cfb2eb"} err="failed to get container status \"7741888e73625636f7b1837e2026b7bcbee22cbe9198e5f27a45544ee7cfb2eb\": rpc error: code = NotFound desc = could not find container \"7741888e73625636f7b1837e2026b7bcbee22cbe9198e5f27a45544ee7cfb2eb\": container with ID starting with 7741888e73625636f7b1837e2026b7bcbee22cbe9198e5f27a45544ee7cfb2eb not found: ID does not exist" Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.747863 4622 scope.go:117] "RemoveContainer" containerID="7464dc5b7870ca21a3deb1e53c02d8f75f735c70eb721746dfd8537311b51f34" Nov 26 12:05:34 crc kubenswrapper[4622]: E1126 12:05:34.748329 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7464dc5b7870ca21a3deb1e53c02d8f75f735c70eb721746dfd8537311b51f34\": container with ID starting with 7464dc5b7870ca21a3deb1e53c02d8f75f735c70eb721746dfd8537311b51f34 not found: ID does not exist" containerID="7464dc5b7870ca21a3deb1e53c02d8f75f735c70eb721746dfd8537311b51f34" Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.748365 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7464dc5b7870ca21a3deb1e53c02d8f75f735c70eb721746dfd8537311b51f34"} err="failed to get container status \"7464dc5b7870ca21a3deb1e53c02d8f75f735c70eb721746dfd8537311b51f34\": rpc error: code = NotFound desc = could not find container 
\"7464dc5b7870ca21a3deb1e53c02d8f75f735c70eb721746dfd8537311b51f34\": container with ID starting with 7464dc5b7870ca21a3deb1e53c02d8f75f735c70eb721746dfd8537311b51f34 not found: ID does not exist" Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.748404 4622 scope.go:117] "RemoveContainer" containerID="1561b233d6b4888757cb719afc5818c2da7e94b475394f5c0143d519776f3ef9" Nov 26 12:05:34 crc kubenswrapper[4622]: E1126 12:05:34.748779 4622 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1561b233d6b4888757cb719afc5818c2da7e94b475394f5c0143d519776f3ef9\": container with ID starting with 1561b233d6b4888757cb719afc5818c2da7e94b475394f5c0143d519776f3ef9 not found: ID does not exist" containerID="1561b233d6b4888757cb719afc5818c2da7e94b475394f5c0143d519776f3ef9" Nov 26 12:05:34 crc kubenswrapper[4622]: I1126 12:05:34.748813 4622 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1561b233d6b4888757cb719afc5818c2da7e94b475394f5c0143d519776f3ef9"} err="failed to get container status \"1561b233d6b4888757cb719afc5818c2da7e94b475394f5c0143d519776f3ef9\": rpc error: code = NotFound desc = could not find container \"1561b233d6b4888757cb719afc5818c2da7e94b475394f5c0143d519776f3ef9\": container with ID starting with 1561b233d6b4888757cb719afc5818c2da7e94b475394f5c0143d519776f3ef9 not found: ID does not exist" Nov 26 12:05:35 crc kubenswrapper[4622]: I1126 12:05:35.157522 4622 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3f4de5a-89ab-4065-b962-df32521bc9b6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c3f4de5a-89ab-4065-b962-df32521bc9b6" (UID: "c3f4de5a-89ab-4065-b962-df32521bc9b6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 26 12:05:35 crc kubenswrapper[4622]: I1126 12:05:35.191589 4622 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3f4de5a-89ab-4065-b962-df32521bc9b6-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 26 12:05:35 crc kubenswrapper[4622]: I1126 12:05:35.295770 4622 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zdnnw"] Nov 26 12:05:35 crc kubenswrapper[4622]: I1126 12:05:35.303280 4622 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zdnnw"] Nov 26 12:05:35 crc kubenswrapper[4622]: E1126 12:05:35.708938 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1\\\"\"" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" podUID="3b8a350c-99de-4c01-ae25-cf10b5247f9a" Nov 26 12:05:36 crc kubenswrapper[4622]: I1126 12:05:36.717619 4622 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3f4de5a-89ab-4065-b962-df32521bc9b6" path="/var/lib/kubelet/pods/c3f4de5a-89ab-4065-b962-df32521bc9b6/volumes" Nov 26 12:05:45 crc kubenswrapper[4622]: I1126 12:05:45.198638 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 12:05:45 crc kubenswrapper[4622]: I1126 12:05:45.200973 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 12:05:45 crc kubenswrapper[4622]: I1126 12:05:45.918017 4622 scope.go:117] "RemoveContainer" containerID="618fb05c3697dbd648c3836c3ff2c7206192723e76396506f36c4374543464c5" Nov 26 12:05:47 crc kubenswrapper[4622]: E1126 12:05:47.708741 4622 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.113:5001/openstack-k8s-operators/test-operator:eeeb92f25f8fc5eae7d17127e44dff2ce526fcd1\\\"\"" pod="openstack-operators/test-operator-controller-manager-76b59d6455-m8xbs" podUID="3b8a350c-99de-4c01-ae25-cf10b5247f9a" Nov 26 12:06:15 crc kubenswrapper[4622]: I1126 12:06:15.201148 4622 patch_prober.go:28] interesting pod/machine-config-daemon-k565w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 26 12:06:15 crc kubenswrapper[4622]: I1126 12:06:15.202064 4622 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 26 12:06:15 crc kubenswrapper[4622]: I1126 12:06:15.202157 4622 
kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k565w" Nov 26 12:06:15 crc kubenswrapper[4622]: I1126 12:06:15.203676 4622 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"da4aac5b821c844f3555f6720416da1f018c03df13f144beb2dadf5623f3d044"} pod="openshift-machine-config-operator/machine-config-daemon-k565w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 26 12:06:15 crc kubenswrapper[4622]: I1126 12:06:15.203755 4622 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k565w" podUID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerName="machine-config-daemon" containerID="cri-o://da4aac5b821c844f3555f6720416da1f018c03df13f144beb2dadf5623f3d044" gracePeriod=600 Nov 26 12:06:16 crc kubenswrapper[4622]: I1126 12:06:16.047859 4622 generic.go:334] "Generic (PLEG): container finished" podID="b4b2dbdb-8e61-40e9-86ae-3dba474c215b" containerID="da4aac5b821c844f3555f6720416da1f018c03df13f144beb2dadf5623f3d044" exitCode=0 Nov 26 12:06:16 crc kubenswrapper[4622]: I1126 12:06:16.048027 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerDied","Data":"da4aac5b821c844f3555f6720416da1f018c03df13f144beb2dadf5623f3d044"} Nov 26 12:06:16 crc kubenswrapper[4622]: I1126 12:06:16.048598 4622 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k565w" event={"ID":"b4b2dbdb-8e61-40e9-86ae-3dba474c215b","Type":"ContainerStarted","Data":"cb5b3a7b958c7c721ecfcf8c14121afdad236bbffb444bfb0542ae7fc6871577"} Nov 26 12:06:16 crc kubenswrapper[4622]: I1126 12:06:16.048633 4622 scope.go:117] "RemoveContainer" containerID="ed0ff7531ac244279af747f8aa582ce6e5cdd3a7cc8067090399f1c255fe7515" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515111566560024453 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015111566561017371 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015111557615016514 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015111557615015464 5ustar corecore